blob: b6eb5c1d1db952c88cc0723898a5cb4cbcfb04ca [file] [log] [blame]
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips/asm_support_mips.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020020#include "arch/mips/entrypoints_direct_mips.h"
21#include "arch/mips/instruction_set_features_mips.h"
22#include "art_method.h"
Chris Larsen701566a2015-10-27 15:29:13 -070023#include "code_generator_utils.h"
Vladimir Marko3a21e382016-09-02 12:38:38 +010024#include "compiled_method.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020025#include "entrypoints/quick/quick_entrypoints.h"
26#include "entrypoints/quick/quick_entrypoints_enum.h"
27#include "gc/accounting/card_table.h"
28#include "intrinsics.h"
Chris Larsen701566a2015-10-27 15:29:13 -070029#include "intrinsics_mips.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020030#include "mirror/array-inl.h"
31#include "mirror/class-inl.h"
32#include "offsets.h"
33#include "thread.h"
34#include "utils/assembler.h"
35#include "utils/mips/assembler_mips.h"
36#include "utils/stack_checks.h"
37
38namespace art {
39namespace mips {
40
// Stack offset at which the current ArtMethod* is stored (bottom of frame).
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* argument into compiled code (A0).
static constexpr Register kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
48
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020049Location MipsReturnLocation(Primitive::Type return_type) {
50 switch (return_type) {
51 case Primitive::kPrimBoolean:
52 case Primitive::kPrimByte:
53 case Primitive::kPrimChar:
54 case Primitive::kPrimShort:
55 case Primitive::kPrimInt:
56 case Primitive::kPrimNot:
57 return Location::RegisterLocation(V0);
58
59 case Primitive::kPrimLong:
60 return Location::RegisterPairLocation(V0, V1);
61
62 case Primitive::kPrimFloat:
63 case Primitive::kPrimDouble:
64 return Location::FpuRegisterLocation(F0);
65
66 case Primitive::kPrimVoid:
67 return Location();
68 }
69 UNREACHABLE();
70}
71
// Return-value location for the dex calling convention; delegates to the
// shared MipsReturnLocation() helper above.
Location InvokeDexCallingConventionVisitorMIPS::GetReturnLocation(Primitive::Type type) const {
  return MipsReturnLocation(type);
}
75
// Location of the implicit ArtMethod* parameter: always register A0
// (see kMethodRegisterArgument).
Location InvokeDexCallingConventionVisitorMIPS::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
79
// Assigns the location (register, register pair, or stack slot) for the next
// parameter of the given type, advancing the visitor's internal cursors
// (`gp_index_`, `float_index_`, `stack_index_`) as a side effect.
// NOTE(review): `calling_convention` is a member declared outside this view —
// presumably an InvokeDexCallingConvention instance; confirm in the header.
Location InvokeDexCallingConventionVisitorMIPS::GetNextLocation(Primitive::Type type) {
  Location next_location;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit core value: consume one GP register, or fall back to a stack slot
      // once the register arguments are exhausted.
      uint32_t gp_index = gp_index_++;
      if (gp_index < calling_convention.GetNumberOfRegisters()) {
        next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index));
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit core value: needs an aligned even/odd register pair.
      uint32_t gp_index = gp_index_;
      gp_index_ += 2;
      if (gp_index + 1 < calling_convention.GetNumberOfRegisters()) {
        Register reg = calling_convention.GetRegisterAt(gp_index);
        if (reg == A1 || reg == A3) {
          gp_index_++;  // Skip A1(A3), and use A2_A3(T0_T1) instead.
          gp_index++;
        }
        Register low_even = calling_convention.GetRegisterAt(gp_index);
        Register high_odd = calling_convention.GetRegisterAt(gp_index + 1);
        // The pair must be consecutive registers (low half in the even one).
        DCHECK_EQ(low_even + 1, high_odd);
        next_location = Location::RegisterPairLocation(low_even, high_odd);
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::DoubleStackSlot(stack_offset);
      }
      break;
    }

    // Note: both float and double types are stored in even FPU registers. On 32 bit FPU, double
    // will take up the even/odd pair, while floats are stored in even regs only.
    // On 64 bit FPU, both double and float are stored in even registers only.
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      uint32_t float_index = float_index_++;
      if (float_index < calling_convention.GetNumberOfFpuRegisters()) {
        next_location = Location::FpuRegisterLocation(
            calling_convention.GetFpuRegisterAt(float_index));
      } else {
        // Spill to the stack: doubles take a double slot, floats a single slot.
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                     : Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }

  // Space on the stack is reserved for all arguments, even those passed in
  // registers, so the stack cursor always advances.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  return next_location;
}
147
// Runtime calls return values in the same locations as dex calls; reuse the
// shared helper.
Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return MipsReturnLocation(type);
}
151
// Assembler shorthand used throughout the slow paths below: `__ Foo()` emits
// instruction Foo through the MIPS assembler of the enclosing `codegen`.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()->  // NOLINT
// Byte offset of a quick runtime entrypoint within the Thread object (MIPS32).
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200155
// Slow path for HBoundsCheck: throws ArrayIndexOutOfBoundsException (or
// StringIndexOutOfBoundsException for String.charAt) via the runtime.
// Fatal: the entrypoint throws, so control never returns here.
class BoundsCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit BoundsCheckSlowPathMIPS(HBoundsCheck* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations (index, length) to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimInt,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimInt);
    // String.charAt bounds failures throw a different exception type.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    // Both possible entrypoints share the (int32_t, int32_t) signature.
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS);
};
192
193class DivZeroCheckSlowPathMIPS : public SlowPathCodeMIPS {
194 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000195 explicit DivZeroCheckSlowPathMIPS(HDivZeroCheck* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200196
197 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
198 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
199 __ Bind(GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +0100200 mips_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200201 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
202 }
203
204 bool IsFatal() const OVERRIDE { return true; }
205
206 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS"; }
207
208 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200209 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS);
210};
211
// Slow path for HLoadClass/HClinitCheck: calls the runtime to resolve (and
// optionally initialize) a class. For the kBssEntry load kind it also writes
// the resolved class back into the .bss entry so later loads take the fast
// path. The store is emitted with assembler reordering disabled so the
// label/patch lines up with the exact store instruction.
class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  LoadClassSlowPathMIPS(HLoadClass* cls,
                        HInstruction* at,
                        uint32_t dex_pc,
                        bool do_clinit,
                        const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr)
      : SlowPathCodeMIPS(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_info_high_(bss_info_high) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    // True for Baker read barriers and for no read barriers at all; only the
    // non-Baker read-barrier configuration needs the address re-computation
    // at the end of this method.
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    const bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call (A0 is about to be clobbered by the type index below).
      entry_address = temp_is_a0 ? out.AsRegister<Register>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // A0 <- type index; then call the resolve/initialize entrypoint.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
      // Disable reordering so the bound label stays glued to the store that
      // will be patched (placeholder offset 0x5678).
      bool reordering = __ SetReorder(false);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      mips_codegen->MoveLocation(out,
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    }
    RestoreLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the class entry (registers were restored above, so `entry_address`
      // is no longer usable); emit fresh high/low patches through TMP.
      const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
      bool reordering = __ SetReorder(false);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base, info_low);
      __ StoreToOffset(kStoreWord, out.AsRegister<Register>(), TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS);
};
324
// Slow path for HLoadString with the kBssEntry load kind: calls the runtime
// to resolve the string, then writes the result back into the .bss entry so
// subsequent loads succeed on the fast path. Mirrors the structure of
// LoadClassSlowPathMIPS above.
class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit LoadStringSlowPathMIPS(HLoadString* instruction,
                                  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high)
      : SlowPathCodeMIPS(instruction), bss_info_high_(bss_info_high) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    Register out = locations->Out().AsRegister<Register>();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    // True for Baker read barriers and for no read barriers at all; only the
    // non-Baker read-barrier configuration re-computes the entry address below.
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call (A0 is clobbered by the string index below).
      entry_address = temp_is_a0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // A0 <- string index; call the resolution entrypoint.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved string to the BSS entry.
    if (baker_or_no_read_barriers) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, bss_info_high_);
      // Reordering is disabled so the bound label stays attached to the
      // patched store (placeholder offset 0x5678).
      bool reordering = __ SetReorder(false);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }

    // Move the resolved string from A0 into the instruction's output location.
    Primitive::Type type = instruction_->GetType();
    mips_codegen->MoveLocation(locations->Out(),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               type);
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved string to the BSS entry.
    if (!baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the string entry (live registers were just restored), using fresh
      // high/low patches and the TMP register.
      const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, info_high);
      bool reordering = __ SetReorder(false);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base, info_low);
      __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS"; }

 private:
  // Pointer to the high half PC-relative patch info.
  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS);
};
410
411class NullCheckSlowPathMIPS : public SlowPathCodeMIPS {
412 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000413 explicit NullCheckSlowPathMIPS(HNullCheck* instr) : SlowPathCodeMIPS(instr) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200414
415 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
416 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
417 __ Bind(GetEntryLabel());
418 if (instruction_->CanThrowIntoCatchBlock()) {
419 // Live registers will be restored in the catch block if caught.
420 SaveLiveRegisters(codegen, instruction_->GetLocations());
421 }
Serban Constantinescufca16662016-07-14 09:21:59 +0100422 mips_codegen->InvokeRuntime(kQuickThrowNullPointer,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200423 instruction_,
424 instruction_->GetDexPc(),
Serban Constantinescufca16662016-07-14 09:21:59 +0100425 this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200426 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
427 }
428
429 bool IsFatal() const OVERRIDE { return true; }
430
431 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS"; }
432
433 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200434 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS);
435};
436
437class SuspendCheckSlowPathMIPS : public SlowPathCodeMIPS {
438 public:
439 SuspendCheckSlowPathMIPS(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000440 : SlowPathCodeMIPS(instruction), successor_(successor) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200441
442 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Lena Djokicca8c2952017-05-29 11:31:46 +0200443 LocationSummary* locations = instruction_->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200444 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
445 __ Bind(GetEntryLabel());
Lena Djokicca8c2952017-05-29 11:31:46 +0200446 SaveLiveRegisters(codegen, locations); // Only saves live vector registers for SIMD.
Serban Constantinescufca16662016-07-14 09:21:59 +0100447 mips_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200448 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Lena Djokicca8c2952017-05-29 11:31:46 +0200449 RestoreLiveRegisters(codegen, locations); // Only restores live vector registers for SIMD.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200450 if (successor_ == nullptr) {
451 __ B(GetReturnLabel());
452 } else {
453 __ B(mips_codegen->GetLabelOf(successor_));
454 }
455 }
456
457 MipsLabel* GetReturnLabel() {
458 DCHECK(successor_ == nullptr);
459 return &return_label_;
460 }
461
462 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS"; }
463
464 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200465 // If not null, the block to branch to after the suspend check.
466 HBasicBlock* const successor_;
467
468 // If `successor_` is null, the label to branch to after the suspend check.
469 MipsLabel return_label_;
470
471 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS);
472};
473
// Slow path shared by HInstanceOf and HCheckCast: calls the corresponding
// runtime entrypoint. For instanceof the (boolean) result is moved back to
// the output location; for checkcast the entrypoint throws on failure.
// `is_fatal_` selects whether the path can return (non-fatal: save/restore
// registers and branch back) or not.
class TypeCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit TypeCheckSlowPathMIPS(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      // A fatal path never returns, so saving registers would be wasted work.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations (object, class) to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // Move the instanceof result into the instruction's output location.
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether this slow path never returns to compiled code.
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS);
};
527
528class DeoptimizationSlowPathMIPS : public SlowPathCodeMIPS {
529 public:
Aart Bik42249c32016-01-07 15:33:50 -0800530 explicit DeoptimizationSlowPathMIPS(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000531 : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200532
533 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800534 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200535 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100536 LocationSummary* locations = instruction_->GetLocations();
537 SaveLiveRegisters(codegen, locations);
538 InvokeRuntimeCallingConvention calling_convention;
539 __ LoadConst32(calling_convention.GetRegisterAt(0),
540 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufca16662016-07-14 09:21:59 +0100541 mips_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100542 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200543 }
544
545 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS"; }
546
547 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200548 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS);
549};
550
// Slow path for HArraySet of an object element: calls the aputObject runtime
// entrypoint, which performs the type check and the store (plus write
// barrier) that could not be proven safe at compile time.
class ArraySetSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit ArraySetSlowPathMIPS(HInstruction* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Shuffle (array, index, value) into the first three argument registers;
    // the sources may overlap the destinations, hence a parallel move.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS);
};
591
592// Slow path marking an object reference `ref` during a read
593// barrier. The field `obj.field` in the object `obj` holding this
594// reference does not get updated by this slow path after marking (see
595// ReadBarrierMarkAndUpdateFieldSlowPathMIPS below for that).
596//
597// This means that after the execution of this slow path, `ref` will
598// always be up-to-date, but `obj.field` may not; i.e., after the
599// flip, `ref` will be a to-space reference, but `obj.field` will
600// probably still be a from-space reference (unless it gets updated by
601// another thread, or if another thread installed another object
602// reference (different from `ref`) in `obj.field`).
603//
604// If `entrypoint` is a valid location it is assumed to already be
605// holding the entrypoint. The case where the entrypoint is passed in
606// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `instruction`: the instruction requiring the read barrier.
  // `ref`:         the (register) location holding the reference to mark;
  //                used as both input and output of the mark entrypoint.
  // `entrypoint`:  optional register location already holding the entrypoint
  //                address (used for the GcRoot read barrier); when invalid,
  //                the entrypoint is loaded from the Thread entrypoint table.
  ReadBarrierMarkSlowPathMIPS(HInstruction* instruction,
                              Location ref,
                              Location entrypoint = Location::NoLocation())
      : SlowPathCodeMIPS(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only instructions that may actually emit a reference load with a
    // read barrier are expected here.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    // The dedicated per-register mark entrypoints only exist for these
    // registers (argument/temp/callee-save ranges plus FP).
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      // The caller has already loaded the entrypoint address into T9.
      mips_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      DCHECK_EQ(entrypoint_.AsRegister<Register>(), T9);
      __ Jalr(entrypoint_.AsRegister<Register>());
      __ NopIfNoReordering();
    } else {
      // The entrypoint table is indexed by (register number - 1);
      // presumably ZERO has no slot since it can never hold a reference —
      // confirm against Thread::ReadBarrierMarkEntryPointsOffset.
      int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
      // This runtime call does not require a stack map.
      mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this,
                                                        /* direct */ false);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS);
};
684
685// Slow path marking an object reference `ref` during a read barrier,
686// and if needed, atomically updating the field `obj.field` in the
687// object `obj` holding this reference after marking (contrary to
688// ReadBarrierMarkSlowPathMIPS above, which never tries to update
689// `obj.field`).
690//
691// This means that after the execution of this slow path, both `ref`
692// and `obj.field` will be up-to-date; i.e., after the flip, both will
693// hold the same to-space reference (unless another thread installed
694// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `instruction`:  the (intrinsified UnsafeCASObject) instruction.
  // `ref`:          the (register) location of the reference to mark.
  // `obj`:          the register holding the object containing the field.
  // `field_offset`: the (register pair) location of the field offset;
  //                 only the low half carries data.
  // `temp1`:        a temporary used to preserve the pre-marking reference
  //                 across the entrypoint call.
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS(HInstruction* instruction,
                                            Location ref,
                                            Register obj,
                                            Location field_offset,
                                            Register temp1)
      : SlowPathCodeMIPS(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegisterPair()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    // The dedicated per-register mark entrypoints only exist for these
    // registers (argument/temp/callee-save ranges plus FP).
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    // The entrypoint table is indexed by (register number - 1);
    // presumably ZERO has no slot since it can never hold a reference —
    // confirm against Thread::ReadBarrierMarkEntryPointsOffset.
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                      instruction_,
                                                      this,
                                                      /* direct */ false);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    MipsLabel done;
    __ Beq(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    Register base = obj_;
    // The UnsafeCASObject intrinsic uses a register pair as field
    // offset ("long offset"), of which only the low part contains
    // data.
    Register offset = field_offset_.AsRegisterPairLow<Register>();
    Register expected = temp1_;
    Register value = ref_reg;
    Register tmp_ptr = TMP;  // Pointer to actual memory.
    Register tmp = AT;       // Value in memory.

    __ Addu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    // Use the R6 or R2 encodings of LL/SC depending on the target ISA.
    bool is_r6 = mips_codegen->GetInstructionSetFeatures().IsR6();
    MipsLabel loop_head, exit_loop;
    __ Bind(&loop_head);
    if (is_r6) {
      __ LlR6(tmp, tmp_ptr);
    } else {
      __ LlR2(tmp, tmp_ptr);
    }
    __ Bne(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    if (is_r6) {
      __ ScR6(tmp, tmp_ptr);
    } else {
      __ ScR2(tmp, tmp_ptr);
    }
    // SC writes 0 into `tmp` on failure; retry until it succeeds.
    __ Beqz(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  // Temporary register preserving the old reference across the entrypoint call.
  const Register temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS);
};
851
852// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `instruction`: the instruction requiring the read barrier.
  // `out`:         the location where the to-space reference is written.
  // `ref`:         the location of the loaded (from-space) reference.
  // `obj`:         the location of the object holding the reference.
  // `offset`:      the static field/element offset within `obj`.
  // `index`:       optional dynamic index (HArrayGet) or "long offset"
  //                register pair (Unsafe intrinsics); invalid otherwise.
  ReadBarrierForHeapReferenceSlowPathMIPS(HInstruction* instruction,
                                          Location out,
                                          Location ref,
                                          Location obj,
                                          uint32_t offset,
                                          Location index)
      : SlowPathCodeMIPS(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips::MipsAssembler::Sll and
          // art::mips::MipsAssembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // No dynamic index: pass the static offset as an immediate after
      // the parallel move (it cannot conflict with the other arguments).
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathMIPS"; }

 private:
  // Returns a caller-save core register distinct from `ref_` and `obj_`
  // that is neither callee-save nor blocked; aborts if none exists.
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS);
};
1039
1040// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `instruction`: the HLoadClass/HLoadString requiring the barrier.
  // `out`:         the location where the to-space root is written.
  // `root`:        the location of the GC root being read.
  ReadBarrierForRootSlowPathMIPS(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root in the first runtime argument register, call the
    // runtime, and move the returned to-space reference into `out_`.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               root_,
                               Primitive::kPrimNot);
    mips_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS"; }

 private:
  // The location receiving the to-space reference.
  const Location out_;
  // The location of the GC root being read.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS);
};
1086
// Constructs the MIPS32 code generator: registers the architecture's
// register counts and callee-save masks with the base CodeGenerator, and
// initializes the per-method builders, the assembler, and the arena-backed
// tables used for literals and linker/JIT patches.
CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
                                     const MipsInstructionSetFeatures& isa_features,
                                     const CompilerOptions& compiler_options,
                                     OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfFRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      // Deduplication map for 32-bit literals.
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // Linker patch tables for PC-relative method/type/string accesses.
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // Patches resolved at JIT code commit time.
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      clobbered_ra_(false) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
1120
1121#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001122// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1123#define __ down_cast<MipsAssembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -07001124#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001125
1126void CodeGeneratorMIPS::Finalize(CodeAllocator* allocator) {
1127 // Ensure that we fix up branches.
1128 __ FinalizeCode();
1129
1130 // Adjust native pc offsets in stack maps.
1131 for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
Mathieu Chartiera2f526f2017-01-19 14:48:48 -08001132 uint32_t old_position =
1133 stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001134 uint32_t new_position = __ GetAdjustedPosition(old_position);
1135 DCHECK_GE(new_position, old_position);
1136 stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
1137 }
1138
1139 // Adjust pc offsets for the disassembly information.
1140 if (disasm_info_ != nullptr) {
1141 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1142 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1143 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1144 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1145 it.second.start = __ GetAdjustedPosition(it.second.start);
1146 it.second.end = __ GetAdjustedPosition(it.second.end);
1147 }
1148 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1149 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1150 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1151 }
1152 }
1153
1154 CodeGenerator::Finalize(allocator);
1155}
1156
// The move resolver emits through the code generator's assembler.
MipsAssembler* ParallelMoveResolverMIPS::GetAssembler() const {
  return codegen_->GetAssembler();
}
1160
1161void ParallelMoveResolverMIPS::EmitMove(size_t index) {
1162 DCHECK_LT(index, moves_.size());
1163 MoveOperands* move = moves_[index];
1164 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1165}
1166
// Emits code swapping the contents of the two locations of the move at
// `index`, using TMP/AT/FTMP as scratch. Handles every pairing of GPR,
// FPR, GPR pair, and (double) stack slot that the register allocator can
// produce; anything else is a fatal error.
void ParallelMoveResolverMIPS::EmitSwap(size_t index) {
  DCHECK_LT(index, moves_.size());
  MoveOperands* move = moves_[index];
  Primitive::Type type = move->GetType();
  Location loc1 = move->GetDestination();
  Location loc2 = move->GetSource();

  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    // Nothing to swap.
    return;
  }

  if (loc1.IsRegister() && loc2.IsRegister()) {
    // Swap 2 GPRs.
    Register r1 = loc1.AsRegister<Register>();
    Register r2 = loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (loc1.IsFpuRegister() && loc2.IsFpuRegister()) {
    // Swap 2 FPRs, using single- or double-precision moves per the type.
    FRegister f1 = loc1.AsFpuRegister<FRegister>();
    FRegister f2 = loc2.AsFpuRegister<FRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, f2);
      __ MovS(f2, f1);
      __ MovS(f1, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, f2);
      __ MovD(f2, f1);
      __ MovD(f1, FTMP);
    }
  } else if ((loc1.IsRegister() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegister())) {
    // Swap FPR and GPR.
    DCHECK_EQ(type, Primitive::kPrimFloat);  // Can only swap a float.
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2 = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Mfc1(r2, f1);
    __ Mtc1(TMP, f1);
  } else if (loc1.IsRegisterPair() && loc2.IsRegisterPair()) {
    // Swap 2 GPR register pairs, one half at a time.
    Register r1 = loc1.AsRegisterPairLow<Register>();
    Register r2 = loc2.AsRegisterPairLow<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
    r1 = loc1.AsRegisterPairHigh<Register>();
    r2 = loc2.AsRegisterPairHigh<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if ((loc1.IsRegisterPair() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegisterPair())) {
    // Swap FPR and GPR register pair.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                          : loc2.AsRegisterPairLow<Register>();
    Register r2_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                          : loc2.AsRegisterPairHigh<Register>();
    // Use 2 temporary registers because we can't first swap the low 32 bits of an FPR and
    // then swap the high 32 bits of the same FPR. mtc1 makes the high 32 bits of an FPR
    // unpredictable and the following mfch1 will fail.
    __ Mfc1(TMP, f1);
    __ MoveFromFpuHigh(AT, f1);
    __ Mtc1(r2_l, f1);
    __ MoveToFpuHigh(r2_h, f1);
    __ Move(r2_l, TMP);
    __ Move(r2_h, AT);
  } else if (loc1.IsStackSlot() && loc2.IsStackSlot()) {
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ false);
  } else if (loc1.IsDoubleStackSlot() && loc2.IsDoubleStackSlot()) {
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ true);
  } else if ((loc1.IsRegister() && loc2.IsStackSlot()) ||
             (loc1.IsStackSlot() && loc2.IsRegister())) {
    // Swap GPR and stack slot.
    Register reg = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    intptr_t offset = loc1.IsStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    __ Move(TMP, reg);
    __ LoadFromOffset(kLoadWord, reg, SP, offset);
    __ StoreToOffset(kStoreWord, TMP, SP, offset);
  } else if ((loc1.IsRegisterPair() && loc2.IsDoubleStackSlot()) ||
             (loc1.IsDoubleStackSlot() && loc2.IsRegisterPair())) {
    // Swap GPR register pair and double stack slot, one word at a time.
    Register reg_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                           : loc2.AsRegisterPairLow<Register>();
    Register reg_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                           : loc2.AsRegisterPairHigh<Register>();
    intptr_t offset_l = loc1.IsDoubleStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    intptr_t offset_h = loc1.IsDoubleStackSlot() ? loc1.GetHighStackIndex(kMipsWordSize)
                                                 : loc2.GetHighStackIndex(kMipsWordSize);
    __ Move(TMP, reg_l);
    __ LoadFromOffset(kLoadWord, reg_l, SP, offset_l);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_l);
    __ Move(TMP, reg_h);
    __ LoadFromOffset(kLoadWord, reg_h, SP, offset_h);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_h);
  } else if (loc1.IsFpuRegister() || loc2.IsFpuRegister()) {
    // Swap FPR and stack slot.
    FRegister reg = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                         : loc2.AsFpuRegister<FRegister>();
    intptr_t offset = loc1.IsFpuRegister() ? loc2.GetStackIndex() : loc1.GetStackIndex();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, reg);
      __ LoadSFromOffset(reg, SP, offset);
      __ StoreSToOffset(FTMP, SP, offset);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, reg);
      __ LoadDFromOffset(reg, SP, offset);
      __ StoreDToOffset(FTMP, SP, offset);
    }
  } else {
    LOG(FATAL) << "Swap between " << loc1 << " and " << loc2 << " is unsupported";
  }
}
1286
1287void ParallelMoveResolverMIPS::RestoreScratch(int reg) {
1288 __ Pop(static_cast<Register>(reg));
1289}
1290
1291void ParallelMoveResolverMIPS::SpillScratch(int reg) {
1292 __ Push(static_cast<Register>(reg));
1293}
1294
// Swaps one (or, when `double_slot` is true, two consecutive) stack words
// between SP-relative offsets `index1` and `index2`.
void ParallelMoveResolverMIPS::Exchange(int index1, int index2, bool double_slot) {
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMipsWordSize : 0;
  // One iteration per word: load both slots, then store them crosswise.
  for (int i = 0; i <= (double_slot ? 1 : 0); i++, stack_offset += kMipsWordSize) {
    __ LoadFromOffset(kLoadWord,
                      Register(ensure_scratch.GetRegister()),
                      SP,
                      index1 + stack_offset);
    __ LoadFromOffset(kLoadWord,
                      TMP,
                      SP,
                      index2 + stack_offset);
    __ StoreToOffset(kStoreWord,
                     Register(ensure_scratch.GetRegister()),
                     SP,
                     index2 + stack_offset);
    __ StoreToOffset(kStoreWord, TMP, SP, index1 + stack_offset);
  }
}
1318
Alexey Frunze73296a72016-06-03 22:51:46 -07001319void CodeGeneratorMIPS::ComputeSpillMask() {
1320 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
1321 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
1322 DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
1323 // If there're FPU callee-saved registers and there's an odd number of GPR callee-saved
1324 // registers, include the ZERO register to force alignment of FPU callee-saved registers
1325 // within the stack frame.
1326 if ((fpu_spill_mask_ != 0) && (POPCOUNT(core_spill_mask_) % 2 != 0)) {
1327 core_spill_mask_ |= (1 << ZERO);
1328 }
Alexey Frunze58320ce2016-08-30 21:40:46 -07001329}
1330
1331bool CodeGeneratorMIPS::HasAllocatedCalleeSaveRegisters() const {
Alexey Frunze06a46c42016-07-19 15:00:40 -07001332 // If RA is clobbered by PC-relative operations on R2 and it's the only spilled register
Alexey Frunze58320ce2016-08-30 21:40:46 -07001333 // (this can happen in leaf methods), force CodeGenerator::InitializeCodeGeneration()
1334 // into the path that creates a stack frame so that RA can be explicitly saved and restored.
1335 // RA can't otherwise be saved/restored when it's the only spilled register.
Alexey Frunze58320ce2016-08-30 21:40:46 -07001336 return CodeGenerator::HasAllocatedCalleeSaveRegisters() || clobbered_ra_;
Alexey Frunze73296a72016-06-03 22:51:46 -07001337}
1338
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001339static dwarf::Reg DWARFReg(Register reg) {
1340 return dwarf::Reg::MipsCore(static_cast<int>(reg));
1341}
1342
1343// TODO: mapping of floating-point registers to DWARF.
1344
// Emits the method prologue: an (optional) stack-overflow probe, frame
// allocation, callee-saved register spills with matching CFI records, storage
// of the current ArtMethod* at SP[0], and initialization of the
// should-deoptimize flag when the graph uses one.
void CodeGeneratorMIPS::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe the reserved region below SP with a load into ZERO; a fault here
    // is recognized by the runtime as a stack overflow, so record PC info
    // for the probe instruction.
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    // An empty frame must have nothing to spill: only RA in the core mask,
    // no FPU spills, and RA not clobbered (see HasAllocatedCalleeSaveRegisters).
    CHECK_EQ(fpu_spill_mask_, 0u);
    CHECK_EQ(core_spill_mask_, 1u << RA);
    CHECK(!clobbered_ra_);
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Store core callee-saves top-down, highest-numbered register first.
  for (uint32_t mask = core_spill_mask_; mask != 0; ) {
    Register reg = static_cast<Register>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsWordSize;
    // The ZERO register is only included for alignment.
    if (reg != ZERO) {
      __ StoreToOffset(kStoreWord, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves go below the core saves, one doubleword each.
  for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
    FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsDoublewordSize;
    __ StoreDToOffset(reg, SP, ofs);
    // TODO: __ cfi().RelOffset(DWARFReg(reg), ofs);
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1406
// Emits the method epilogue: restores callee-saved registers (with CFI
// restore records), tears down the frame and returns through RA. When the
// frame size fits in a 16-bit immediate, the SP adjustment is placed in the
// jr delay slot to save an instruction.
void CodeGeneratorMIPS::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (uint32_t mask = core_spill_mask_; mask != 0; ) {
      Register reg = static_cast<Register>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsWordSize;
      // The ZERO register is only included for alignment.
      if (reg != ZERO) {
        __ LoadFromOffset(kLoadWord, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
      FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsDoublewordSize;
      __ LoadDFromOffset(reg, SP, ofs);
      // TODO: __ cfi().Restore(DWARFReg(reg));
    }

    size_t frame_size = GetFrameSize();
    // Adjust the stack pointer in the delay slot if doing so doesn't break CFI.
    bool exchange = IsInt<16>(static_cast<int32_t>(frame_size));
    // Reordering is disabled so the assembler doesn't reschedule the
    // hand-placed delay slot instruction.
    bool reordering = __ SetReorder(false);
    if (exchange) {
      __ Jr(RA);
      __ DecreaseFrameSize(frame_size);  // Single instruction in delay slot.
    } else {
      // Frame size too large for a single addiu: release the frame first,
      // then return with an explicit nop in the delay slot.
      __ DecreaseFrameSize(frame_size);
      __ Jr(RA);
      __ Nop();  // In delay slot.
    }
    __ SetReorder(reordering);
  } else {
    // No frame to tear down; just return.
    __ Jr(RA);
    __ NopIfNoReordering();
  }

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1455
1456void CodeGeneratorMIPS::Bind(HBasicBlock* block) {
1457 __ Bind(GetLabelOf(block));
1458}
1459
Lena Djokicca8c2952017-05-29 11:31:46 +02001460VectorRegister VectorRegisterFrom(Location location) {
1461 DCHECK(location.IsFpuRegister());
1462 return static_cast<VectorRegister>(location.AsFpuRegister<FRegister>());
1463}
1464
// Moves a value between two arbitrary locations: core register, register
// pair, FPU register, or (SIMD/double/single) stack slot. `dst_type`
// disambiguates 32-bit vs 64-bit FPU moves. No-op if source equals
// destination. Unsupported combinations fail a DCHECK.
void CodeGeneratorMIPS::MoveLocation(Location destination,
                                     Location source,
                                     Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  if (source.IsConstant()) {
    // Constants have their own materialization paths.
    MoveConstant(destination, source.GetConstant());
  } else {
    if (destination.IsRegister()) {
      // 32-bit value into a core register.
      if (source.IsRegister()) {
        __ Move(destination.AsRegister<Register>(), source.AsRegister<Register>());
      } else if (source.IsFpuRegister()) {
        __ Mfc1(destination.AsRegister<Register>(), source.AsFpuRegister<FRegister>());
      } else {
        DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
        __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
      }
    } else if (destination.IsRegisterPair()) {
      // 64-bit value into a core register pair.
      if (source.IsRegisterPair()) {
        __ Move(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
        __ Move(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      } else if (source.IsFpuRegister()) {
        Register dst_high = destination.AsRegisterPairHigh<Register>();
        Register dst_low = destination.AsRegisterPairLow<Register>();
        FRegister src = source.AsFpuRegister<FRegister>();
        __ Mfc1(dst_low, src);
        __ MoveFromFpuHigh(dst_high, src);
      } else {
        DCHECK(source.IsDoubleStackSlot())
            << "Cannot move from " << source << " to " << destination;
        int32_t off = source.GetStackIndex();
        Register r = destination.AsRegisterPairLow<Register>();
        __ LoadFromOffset(kLoadDoubleword, r, SP, off);
      }
    } else if (destination.IsFpuRegister()) {
      if (source.IsRegister()) {
        DCHECK(!Primitive::Is64BitType(dst_type));
        __ Mtc1(source.AsRegister<Register>(), destination.AsFpuRegister<FRegister>());
      } else if (source.IsRegisterPair()) {
        DCHECK(Primitive::Is64BitType(dst_type));
        FRegister dst = destination.AsFpuRegister<FRegister>();
        Register src_high = source.AsRegisterPairHigh<Register>();
        Register src_low = source.AsRegisterPairLow<Register>();
        __ Mtc1(src_low, dst);
        __ MoveToFpuHigh(src_high, dst);
      } else if (source.IsFpuRegister()) {
        if (GetGraph()->HasSIMD()) {
          // With SIMD, copy the full vector register aliased with the FPU reg.
          __ MoveV(VectorRegisterFrom(destination),
                   VectorRegisterFrom(source));
        } else {
          if (Primitive::Is64BitType(dst_type)) {
            __ MovD(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
          } else {
            DCHECK_EQ(dst_type, Primitive::kPrimFloat);
            __ MovS(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
          }
        }
      } else if (source.IsSIMDStackSlot()) {
        __ LoadQFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
      } else if (source.IsDoubleStackSlot()) {
        DCHECK(Primitive::Is64BitType(dst_type));
        __ LoadDFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
      } else {
        DCHECK(!Primitive::Is64BitType(dst_type));
        DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
        __ LoadSFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
      }
    } else if (destination.IsSIMDStackSlot()) {
      if (source.IsFpuRegister()) {
        __ StoreQToOffset(source.AsFpuRegister<FRegister>(), SP, destination.GetStackIndex());
      } else {
        DCHECK(source.IsSIMDStackSlot());
        // Slot-to-slot vector copy goes through the FPU scratch register.
        __ LoadQFromOffset(FTMP, SP, source.GetStackIndex());
        __ StoreQToOffset(FTMP, SP, destination.GetStackIndex());
      }
    } else if (destination.IsDoubleStackSlot()) {
      int32_t dst_offset = destination.GetStackIndex();
      if (source.IsRegisterPair()) {
        __ StoreToOffset(kStoreDoubleword, source.AsRegisterPairLow<Register>(), SP, dst_offset);
      } else if (source.IsFpuRegister()) {
        __ StoreDToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
      } else {
        DCHECK(source.IsDoubleStackSlot())
            << "Cannot move from " << source << " to " << destination;
        // Slot-to-slot doubleword copy, one word at a time through TMP.
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex() + 4);
        __ StoreToOffset(kStoreWord, TMP, SP, dst_offset + 4);
      }
    } else {
      DCHECK(destination.IsStackSlot()) << destination;
      int32_t dst_offset = destination.GetStackIndex();
      if (source.IsRegister()) {
        __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, dst_offset);
      } else if (source.IsFpuRegister()) {
        __ StoreSToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
      } else {
        DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
        // Slot-to-slot word copy through TMP.
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
      }
    }
  }
}
1571
// Materializes an HConstant into a register, register pair, FPU register or
// stack slot, dispatching on the constant's type (int/null, long, float,
// double). TMP is used as a scratch register for stack-slot and FPU stores.
void CodeGeneratorMIPS::MoveConstant(Location destination, HConstant* c) {
  if (c->IsIntConstant() || c->IsNullConstant()) {
    // Move 32 bit constant.
    int32_t value = GetInt32ValueOf(c);
    if (destination.IsRegister()) {
      Register dst = destination.AsRegister<Register>();
      __ LoadConst32(dst, value);
    } else {
      DCHECK(destination.IsStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
    }
  } else if (c->IsLongConstant()) {
    // Move 64 bit constant.
    int64_t value = GetInt64ValueOf(c);
    if (destination.IsRegisterPair()) {
      Register r_h = destination.AsRegisterPairHigh<Register>();
      Register r_l = destination.AsRegisterPairLow<Register>();
      __ LoadConst64(r_h, r_l, value);
    } else {
      DCHECK(destination.IsDoubleStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
    }
  } else if (c->IsFloatConstant()) {
    // Move 32 bit float constant (bit pattern handled as int32).
    int32_t value = GetInt32ValueOf(c);
    if (destination.IsFpuRegister()) {
      __ LoadSConst32(destination.AsFpuRegister<FRegister>(), value, TMP);
    } else {
      DCHECK(destination.IsStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
    }
  } else {
    // Move 64 bit double constant (bit pattern handled as int64).
    DCHECK(c->IsDoubleConstant()) << c->DebugName();
    int64_t value = GetInt64ValueOf(c);
    if (destination.IsFpuRegister()) {
      FRegister fd = destination.AsFpuRegister<FRegister>();
      __ LoadDConst64(fd, value, TMP);
    } else {
      DCHECK(destination.IsDoubleStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
    }
  }
}
1620
1621void CodeGeneratorMIPS::MoveConstant(Location destination, int32_t value) {
1622 DCHECK(destination.IsRegister());
1623 Register dst = destination.AsRegister<Register>();
1624 __ LoadConst32(dst, value);
1625}
1626
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001627void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* locations) {
1628 if (location.IsRegister()) {
1629 locations->AddTemp(location);
Alexey Frunzec9e94f32015-10-26 16:11:39 -07001630 } else if (location.IsRegisterPair()) {
1631 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1632 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001633 } else {
1634 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1635 }
1636}
1637
// Converts the recorded PC-relative patch infos into LinkerPatch entries via
// `Factory`. For each info, the patch position is the bound label of the
// instruction to patch, and the anchor is the pc_rel_label of the "high"
// patch info (or, on R2 without one, the assembler's PC-relative base label).
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // On R2 we use HMipsComputeBaseMethodAddress and patch relative to
    // the assembler's base label used for PC-relative addressing.
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = info_high.pc_rel_label.IsBound()
        ? __ GetLabelLocation(&info_high.pc_rel_label)
        : __ GetPcRelBaseLabelLocation();
    linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1656
// Collects all linker patches recorded during code generation. Boot-image
// compiles emit relative method/type/string patches; other compiles emit
// .bss-entry string patches instead. Method/type .bss entry patches are
// emitted in both modes.
void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Reserve the exact total up front to avoid reallocation.
  size_t size =
      pc_relative_method_patches_.size() +
      method_bss_entry_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
                                                                  linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    // Outside boot-image compiles, only .bss-entry accesses are valid for
    // methods/types; strings use .bss entries here.
    DCHECK(pc_relative_method_patches_.empty());
    DCHECK(pc_relative_type_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
                                                                linker_patches);
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  // Verify the reservation matched what was actually emitted.
  DCHECK_EQ(size, linker_patches->size());
}
1685
Vladimir Marko65979462017-05-19 17:25:12 +01001686CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001687 MethodReference target_method,
1688 const PcRelativePatchInfo* info_high) {
Vladimir Marko65979462017-05-19 17:25:12 +01001689 return NewPcRelativePatch(*target_method.dex_file,
1690 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001691 info_high,
Vladimir Marko65979462017-05-19 17:25:12 +01001692 &pc_relative_method_patches_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001693}
1694
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001695CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001696 MethodReference target_method,
1697 const PcRelativePatchInfo* info_high) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001698 return NewPcRelativePatch(*target_method.dex_file,
1699 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001700 info_high,
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001701 &method_bss_entry_patches_);
1702}
1703
Alexey Frunze06a46c42016-07-19 15:00:40 -07001704CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001705 const DexFile& dex_file,
1706 dex::TypeIndex type_index,
1707 const PcRelativePatchInfo* info_high) {
1708 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001709}
1710
Vladimir Marko1998cd02017-01-13 13:02:58 +00001711CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001712 const DexFile& dex_file,
1713 dex::TypeIndex type_index,
1714 const PcRelativePatchInfo* info_high) {
1715 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001716}
1717
Vladimir Marko65979462017-05-19 17:25:12 +01001718CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001719 const DexFile& dex_file,
1720 dex::StringIndex string_index,
1721 const PcRelativePatchInfo* info_high) {
1722 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001723}
1724
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001725CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001726 const DexFile& dex_file,
1727 uint32_t offset_or_index,
1728 const PcRelativePatchInfo* info_high,
1729 ArenaDeque<PcRelativePatchInfo>* patches) {
1730 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001731 return &patches->back();
1732}
1733
Alexey Frunze06a46c42016-07-19 15:00:40 -07001734Literal* CodeGeneratorMIPS::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1735 return map->GetOrCreate(
1736 value,
1737 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1738}
1739
Alexey Frunze06a46c42016-07-19 15:00:40 -07001740Literal* CodeGeneratorMIPS::DeduplicateBootImageAddressLiteral(uint32_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001741 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001742}
1743
// Emits the "high half" of a patchable PC-relative address computation into
// `out`, binding the labels in `info_high` (and optionally `info_low`) so
// the linker/JIT can later replace the 0x1234 placeholder. On R6 this is a
// single auipc; on R2 it is lui + addu with either an explicit base register
// or a NAL-produced PC in RA.
void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                             Register out,
                                                             Register base,
                                                             PcRelativePatchInfo* info_low) {
  // Only "high" infos may be passed here; they must not chain to another.
  DCHECK(!info_high->patch_info_high);
  DCHECK_NE(out, base);
  if (GetInstructionSetFeatures().IsR6()) {
    DCHECK_EQ(base, ZERO);
    __ Bind(&info_high->label);
    __ Bind(&info_high->pc_rel_label);
    // Add the high half of a 32-bit offset to PC.
    __ Auipc(out, /* placeholder */ 0x1234);
  } else {
    // If base is ZERO, emit NAL to obtain the actual base.
    if (base == ZERO) {
      // Generate a dummy PC-relative call to obtain PC.
      __ Nal();
    }
    __ Bind(&info_high->label);
    __ Lui(out, /* placeholder */ 0x1234);
    // If we emitted the NAL, bind the pc_rel_label, otherwise base is a register holding
    // the HMipsComputeBaseMethodAddress which has its own label stored in MipsAssembler.
    if (base == ZERO) {
      __ Bind(&info_high->pc_rel_label);
    }
    // Add the high half of a 32-bit offset to PC.
    __ Addu(out, out, (base == ZERO) ? RA : base);
  }
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. lw, jialc, addiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    __ Bind(&info_low->label);
  }
}
1779
Alexey Frunze627c1a02017-01-30 19:28:14 -08001780CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootStringPatch(
1781 const DexFile& dex_file,
1782 dex::StringIndex dex_index,
1783 Handle<mirror::String> handle) {
1784 jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index),
1785 reinterpret_cast64<uint64_t>(handle.GetReference()));
1786 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
1787 return &jit_string_patches_.back();
1788}
1789
1790CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootClassPatch(
1791 const DexFile& dex_file,
1792 dex::TypeIndex dex_index,
1793 Handle<mirror::Class> handle) {
1794 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
1795 reinterpret_cast64<uint64_t>(handle.GetReference()));
1796 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
1797 return &jit_class_patches_.back();
1798}
1799
// Rewrites a lui/instr instruction pair in JIT-generated `code` so it loads
// the address of the root's slot in the JIT roots table. The instructions
// were emitted with the placeholder constant 0x12345678 (verified by the
// DCHECKs on the little-endian instruction bytes) and are patched in place.
void CodeGeneratorMIPS::PatchJitRootUse(uint8_t* code,
                                        const uint8_t* roots_data,
                                        const CodeGeneratorMIPS::JitPatchInfo& info,
                                        uint64_t index_in_table) const {
  uint32_t high_literal_offset = GetAssembler().GetLabelLocation(&info.high_label);
  uint32_t low_literal_offset = GetAssembler().GetLabelLocation(&info.low_label);
  // Address of the root's GcRoot<> slot within the roots table.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint32_t addr32 = dchecked_integral_cast<uint32_t>(address);
  // lui reg, addr32_high
  DCHECK_EQ(code[high_literal_offset + 0], 0x34);
  DCHECK_EQ(code[high_literal_offset + 1], 0x12);
  DCHECK_EQ((code[high_literal_offset + 2] & 0xE0), 0x00);
  DCHECK_EQ(code[high_literal_offset + 3], 0x3C);
  // instr reg, reg, addr32_low
  DCHECK_EQ(code[low_literal_offset + 0], 0x78);
  DCHECK_EQ(code[low_literal_offset + 1], 0x56);
  addr32 += (addr32 & 0x8000) << 1;  // Account for sign extension in "instr reg, reg, addr32_low".
  // lui reg, addr32_high
  code[high_literal_offset + 0] = static_cast<uint8_t>(addr32 >> 16);
  code[high_literal_offset + 1] = static_cast<uint8_t>(addr32 >> 24);
  // instr reg, reg, addr32_low
  code[low_literal_offset + 0] = static_cast<uint8_t>(addr32 >> 0);
  code[low_literal_offset + 1] = static_cast<uint8_t>(addr32 >> 8);
}
1825
1826void CodeGeneratorMIPS::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1827 for (const JitPatchInfo& info : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001828 const auto it = jit_string_roots_.find(StringReference(&info.target_dex_file,
1829 dex::StringIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001830 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001831 uint64_t index_in_table = it->second;
1832 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001833 }
1834 for (const JitPatchInfo& info : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001835 const auto it = jit_class_roots_.find(TypeReference(&info.target_dex_file,
1836 dex::TypeIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001837 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001838 uint64_t index_in_table = it->second;
1839 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001840 }
1841}
1842
// Emits the GC write-barrier card-marking sequence for a reference store
// into `object`. When `value_can_be_null`, the marking is skipped for null
// stores. Clobbers AT and TMP.
void CodeGeneratorMIPS::MarkGCCard(Register object,
                                   Register value,
                                   bool value_can_be_null) {
  MipsLabel done;
  Register card = AT;
  Register temp = TMP;
  if (value_can_be_null) {
    // Null stores don't need a card mark; branch past the sequence.
    __ Beqz(value, &done);
  }
  // Load the card table base from the thread, compute the card address for
  // `object` and dirty it by storing the table base's low byte.
  __ LoadFromOffset(kLoadWord,
                    card,
                    TR,
                    Thread::CardTableOffset<kMipsPointerSize>().Int32Value());
  __ Srl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Addu(temp, card, temp);
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1863
David Brazdil58282f42016-01-14 12:45:10 +00001864void CodeGeneratorMIPS::SetupBlockedRegisters() const {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001865 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1866 blocked_core_registers_[ZERO] = true;
1867 blocked_core_registers_[K0] = true;
1868 blocked_core_registers_[K1] = true;
1869 blocked_core_registers_[GP] = true;
1870 blocked_core_registers_[SP] = true;
1871 blocked_core_registers_[RA] = true;
1872
1873 // AT and TMP(T8) are used as temporary/scratch registers
1874 // (similar to how AT is used by MIPS assemblers).
1875 blocked_core_registers_[AT] = true;
1876 blocked_core_registers_[TMP] = true;
1877 blocked_fpu_registers_[FTMP] = true;
1878
1879 // Reserve suspend and thread registers.
1880 blocked_core_registers_[S0] = true;
1881 blocked_core_registers_[TR] = true;
1882
1883 // Reserve T9 for function calls
1884 blocked_core_registers_[T9] = true;
1885
1886 // Reserve odd-numbered FPU registers.
1887 for (size_t i = 1; i < kNumberOfFRegisters; i += 2) {
1888 blocked_fpu_registers_[i] = true;
1889 }
1890
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02001891 if (GetGraph()->IsDebuggable()) {
1892 // Stubs do not save callee-save floating point registers. If the graph
1893 // is debuggable, we need to deal with these registers differently. For
1894 // now, just block them.
1895 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1896 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1897 }
1898 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001899}
1900
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001901size_t CodeGeneratorMIPS::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1902 __ StoreToOffset(kStoreWord, Register(reg_id), SP, stack_index);
1903 return kMipsWordSize;
1904}
1905
1906size_t CodeGeneratorMIPS::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1907 __ LoadFromOffset(kLoadWord, Register(reg_id), SP, stack_index);
1908 return kMipsWordSize;
1909}
1910
1911size_t CodeGeneratorMIPS::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001912 if (GetGraph()->HasSIMD()) {
1913 __ StoreQToOffset(FRegister(reg_id), SP, stack_index);
1914 } else {
1915 __ StoreDToOffset(FRegister(reg_id), SP, stack_index);
1916 }
1917 return GetFloatingPointSpillSlotSize();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001918}
1919
1920size_t CodeGeneratorMIPS::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001921 if (GetGraph()->HasSIMD()) {
1922 __ LoadQFromOffset(FRegister(reg_id), SP, stack_index);
1923 } else {
1924 __ LoadDFromOffset(FRegister(reg_id), SP, stack_index);
1925 }
1926 return GetFloatingPointSpillSlotSize();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001927}
1928
// Prints the symbolic name of core register `reg` to `stream` (debug output).
void CodeGeneratorMIPS::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}
1932
// Prints the symbolic name of FPU register `reg` to `stream` (debug output).
void CodeGeneratorMIPS::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FRegister(reg);
}
1936
Serban Constantinescufca16662016-07-14 09:21:59 +01001937constexpr size_t kMipsDirectEntrypointRuntimeOffset = 16;
1938
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001939void CodeGeneratorMIPS::InvokeRuntime(QuickEntrypointEnum entrypoint,
1940 HInstruction* instruction,
1941 uint32_t dex_pc,
1942 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001943 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Alexey Frunze15958152017-02-09 19:08:30 -08001944 GenerateInvokeRuntime(GetThreadOffset<kMipsPointerSize>(entrypoint).Int32Value(),
1945 IsDirectEntrypoint(entrypoint));
1946 if (EntrypointRequiresStackMap(entrypoint)) {
1947 RecordPcInfo(instruction, dex_pc, slow_path);
1948 }
1949}
1950
// Calls a runtime entrypoint (given directly as a Thread offset) without
// recording a stack map; the validation call checks that this is legal for
// the current instruction/slow-path combination.
void CodeGeneratorMIPS::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                            HInstruction* instruction,
                                                            SlowPathCode* slow_path,
                                                            bool direct) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset, direct);
}
1958
// Emits the runtime-call sequence: load the entrypoint pointer from the
// Thread register (TR) and call through T9 (reserved for function calls).
// Assembler reordering is disabled so the JALR delay slot is filled
// explicitly below.
void CodeGeneratorMIPS::GenerateInvokeRuntime(int32_t entry_point_offset, bool direct) {
  bool reordering = __ SetReorder(false);
  __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
  __ Jalr(T9);
  if (direct) {
    // Reserve argument space on stack (for $a0-$a3) for
    // entrypoints that directly reference native implementations.
    // Called function may use this space to store $a0-$a3 regs.
    __ IncreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);  // Single instruction in delay slot.
    __ DecreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);
  } else {
    __ Nop();  // In delay slot.
  }
  __ SetReorder(reordering);
}
1974
// Branches to `slow_path` unless the class in `class_reg` is initialized.
// The class status word is compared numerically: any status below
// kStatusInitialized takes the slow path. Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS::GenerateClassInitializationCheck(SlowPathCodeMIPS* slow_path,
                                                                    Register class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Blt(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1984
// Emits a memory barrier. Every barrier kind is implemented with SYNC 0 —
// the only stype supported here — so `kind` is intentionally unused.
void InstructionCodeGeneratorMIPS::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // Only stype 0 is supported.
}
1988
// Emits a suspend check: reads the current thread's flags and enters the
// suspend slow path when any flag is set. With a `successor` block the
// check doubles as the back-edge/goto branch; without one, control simply
// falls through after the (possibly taken) slow path.
void InstructionCodeGeneratorMIPS::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                        HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // Load the 16-bit thread-flags field from the Thread object (TR).
  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMipsPointerSize>().Int32Value());
  if (successor == nullptr) {
    // Flags set -> slow path; it returns to the label bound right here.
    __ Bnez(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Flags clear -> jump straight to the successor; otherwise take the
    // slow path, which resumes at the successor's label.
    __ Beqz(TMP, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
2008
// Binds this instruction visitor to the shared code generator and caches its
// assembler for the `__` emission macro.
InstructionCodeGeneratorMIPS::InstructionCodeGeneratorMIPS(HGraph* graph,
                                                           CodeGeneratorMIPS* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
2014
2015void LocationsBuilderMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
2016 DCHECK_EQ(instruction->InputCount(), 2U);
2017 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2018 Primitive::Type type = instruction->GetResultType();
2019 switch (type) {
2020 case Primitive::kPrimInt: {
2021 locations->SetInAt(0, Location::RequiresRegister());
2022 HInstruction* right = instruction->InputAt(1);
2023 bool can_use_imm = false;
2024 if (right->IsConstant()) {
2025 int32_t imm = CodeGenerator::GetInt32ValueOf(right->AsConstant());
2026 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
2027 can_use_imm = IsUint<16>(imm);
2028 } else if (instruction->IsAdd()) {
2029 can_use_imm = IsInt<16>(imm);
2030 } else {
2031 DCHECK(instruction->IsSub());
2032 can_use_imm = IsInt<16>(-imm);
2033 }
2034 }
2035 if (can_use_imm)
2036 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
2037 else
2038 locations->SetInAt(1, Location::RequiresRegister());
2039 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2040 break;
2041 }
2042
2043 case Primitive::kPrimLong: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002044 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002045 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2046 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002047 break;
2048 }
2049
2050 case Primitive::kPrimFloat:
2051 case Primitive::kPrimDouble:
2052 DCHECK(instruction->IsAdd() || instruction->IsSub());
2053 locations->SetInAt(0, Location::RequiresFpuRegister());
2054 locations->SetInAt(1, Location::RequiresFpuRegister());
2055 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2056 break;
2057
2058 default:
2059 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
2060 }
2061}
2062
// Emits code for the two-input arithmetic/logic ops (ADD/SUB/AND/OR/XOR).
// 32-bit ints use a single instruction (immediate form when the locations
// builder proved the constant fits). 64-bit longs operate on register pairs,
// with TMP/AT used as scratch for carries and materialized constants.
// Float/double handle only ADD/SUB.
void InstructionCodeGeneratorMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Location rhs_location = locations->InAt(1);

      Register rhs_reg = ZERO;
      int32_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<Register>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (use_imm)
          __ Addiu(dst, lhs, rhs_imm);
        else
          __ Addu(dst, lhs, rhs_reg);
      } else {
        DCHECK(instruction->IsSub());
        // Subtraction of an immediate is an addition of its negation.
        if (use_imm)
          __ Addiu(dst, lhs, -rhs_imm);
        else
          __ Subu(dst, lhs, rhs_reg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      if (!use_imm) {
        Register rhs_high = rhs_location.AsRegisterPairHigh<Register>();
        Register rhs_low = rhs_location.AsRegisterPairLow<Register>();
        if (instruction->IsAnd()) {
          __ And(dst_low, lhs_low, rhs_low);
          __ And(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsOr()) {
          __ Or(dst_low, lhs_low, rhs_low);
          __ Or(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsXor()) {
          __ Xor(dst_low, lhs_low, rhs_low);
          __ Xor(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsAdd()) {
          // 64-bit add: compute the low word's carry into TMP, then fold it
          // into the high word. The carry test must read a register that is
          // guaranteed not to have been overwritten by the low-word sum.
          if (lhs_low == rhs_low) {
            // Special case for lhs = rhs and the sum potentially overwriting both lhs and rhs.
            __ Slt(TMP, lhs_low, ZERO);
            __ Addu(dst_low, lhs_low, rhs_low);
          } else {
            __ Addu(dst_low, lhs_low, rhs_low);
            // If the sum overwrites rhs, lhs remains unchanged, otherwise rhs remains unchanged.
            __ Sltu(TMP, dst_low, (dst_low == rhs_low) ? lhs_low : rhs_low);
          }
          __ Addu(dst_high, lhs_high, rhs_high);
          __ Addu(dst_high, dst_high, TMP);
        } else {
          DCHECK(instruction->IsSub());
          // 64-bit sub: borrow (in TMP) must be computed before the low
          // subtraction can clobber lhs_low.
          __ Sltu(TMP, lhs_low, rhs_low);
          __ Subu(dst_low, lhs_low, rhs_low);
          __ Subu(dst_high, lhs_high, rhs_high);
          __ Subu(dst_high, dst_high, TMP);
        }
      } else {
        // Constant right-hand side: handle each 32-bit half separately,
        // preferring immediate forms and skipping no-op moves.
        int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
        if (instruction->IsOr()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Ori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Or(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Ori(dst_high, lhs_high, high);
            }
          } else {
            // TMP still holds `low` if both halves are equal.
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Or(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsXor()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Xori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Xor(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Xori(dst_high, lhs_high, high);
            }
          } else {
            // TMP still holds `low` if both halves are equal.
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Xor(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsAnd()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            __ Andi(dst_low, lhs_low, low);
          } else if (low != 0xFFFFFFFF) {
            __ LoadConst32(TMP, low);
            __ And(dst_low, lhs_low, TMP);
          } else if (dst_low != lhs_low) {
            // AND with all-ones is an identity; only a move may be needed.
            __ Move(dst_low, lhs_low);
          }
          if (IsUint<16>(high)) {
            __ Andi(dst_high, lhs_high, high);
          } else if (high != 0xFFFFFFFF) {
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ And(dst_high, lhs_high, TMP);
          } else if (dst_high != lhs_high) {
            __ Move(dst_high, lhs_high);
          }
        } else {
          // ADD/SUB with a constant: SUB becomes ADD of the negated value.
          if (instruction->IsSub()) {
            value = -value;
          } else {
            DCHECK(instruction->IsAdd());
          }
          int32_t low = Low32Bits(value);
          int32_t high = High32Bits(value);
          if (IsInt<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Addiu(dst_low, lhs_low, low);
            }
            // Carry from the low word (into AT): a non-zero addend carried
            // iff the unsigned sum wrapped below the addend.
            if (low != 0) {
              __ Sltiu(AT, dst_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Addu(dst_low, lhs_low, TMP);
            __ Sltu(AT, dst_low, TMP);
          }
          if (IsInt<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Addiu(dst_high, lhs_high, high);
            }
          } else {
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Addu(dst_high, lhs_high, TMP);
          }
          // Fold the carry into the high word (AT is only valid if low != 0).
          if (low != 0) {
            __ Addu(dst_high, dst_high, AT);
          }
        }
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat) {
          __ AddS(dst, lhs, rhs);
        } else {
          __ AddD(dst, lhs, rhs);
        }
      } else {
        DCHECK(instruction->IsSub());
        if (type == Primitive::kPrimFloat) {
          __ SubS(dst, lhs, rhs);
        } else {
          __ SubD(dst, lhs, rhs);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2279
2280void LocationsBuilderMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002281 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002282
2283 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2284 Primitive::Type type = instr->GetResultType();
2285 switch (type) {
2286 case Primitive::kPrimInt:
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002287 locations->SetInAt(0, Location::RequiresRegister());
2288 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2289 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2290 break;
2291 case Primitive::kPrimLong:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002292 locations->SetInAt(0, Location::RequiresRegister());
2293 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2294 locations->SetOut(Location::RequiresRegister());
2295 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002296 default:
2297 LOG(FATAL) << "Unexpected shift type " << type;
2298 }
2299}
2300
2301static constexpr size_t kMipsBitsPerWord = kMipsWordSize * kBitsPerByte;
2302
// Emits code for shifts and rotations (SHL/SHR/USHR/ROR) on int and long.
// Constant shift amounts are masked to the type's legal distance range.
// Long values live in register pairs; constant long shifts split into the
// "< 32" and ">= 32" cases at compile time, while variable long shifts
// emit both sequences and select the ">= 32" swap at run time by testing
// the amount against kMipsBitsPerWord. TMP and AT are used as scratch.
void InstructionCodeGeneratorMIPS::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  Location rhs_location = locations->InAt(1);
  bool use_imm = rhs_location.IsConstant();
  Register rhs_reg = use_imm ? ZERO : rhs_location.AsRegister<Register>();
  int64_t rhs_imm = use_imm ? CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()) : 0;
  const uint32_t shift_mask =
      (type == Primitive::kPrimInt) ? kMaxIntShiftDistance : kMaxLongShiftDistance;
  const uint32_t shift_value = rhs_imm & shift_mask;
  // Are the INS (Insert Bit Field) and ROTR instructions supported?
  bool has_ins_rotr = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero degenerates to a (possibly elided) move.
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (instr->IsShl()) {
          __ Sll(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Sra(dst, lhs, shift_value);
        } else if (instr->IsUShr()) {
          __ Srl(dst, lhs, shift_value);
        } else {
          // Rotate: single ROTR on R2+, otherwise synthesized from two
          // shifts and an OR.
          if (has_ins_rotr) {
            __ Rotr(dst, lhs, shift_value);
          } else {
            __ Sll(TMP, lhs, (kMipsBitsPerWord - shift_value) & shift_mask);
            __ Srl(dst, lhs, shift_value);
            __ Or(dst, dst, TMP);
          }
        }
      } else {
        if (instr->IsShl()) {
          __ Sllv(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Srav(dst, lhs, rhs_reg);
        } else if (instr->IsUShr()) {
          __ Srlv(dst, lhs, rhs_reg);
        } else {
          if (has_ins_rotr) {
            __ Rotrv(dst, lhs, rhs_reg);
          } else {
            __ Subu(TMP, ZERO, rhs_reg);
            // 32-bit shift instructions use the 5 least significant bits of the shift count, so
            // shifting by `-rhs_reg` is equivalent to shifting by `(32 - rhs_reg) & 31`. The case
            // when `rhs_reg & 31 == 0` is OK even though we don't shift `lhs` left all the way out
            // by 32, because the result in this case is computed as `(lhs >> 0) | (lhs << 0)`,
            // IOW, the OR'd values are equal.
            __ Sllv(TMP, lhs, TMP);
            __ Srlv(dst, lhs, rhs_reg);
            __ Or(dst, dst, TMP);
          }
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero: just move the pair (handles any overlap).
          codegen_->MoveLocation(locations->Out(), locations->InAt(0), type);
        } else if (shift_value < kMipsBitsPerWord) {
          // Small shift (< 32): bits cross between the two halves. On R2+
          // the crossing bits are merged with INS; otherwise with
          // shift+shift+OR sequences. Note the write order is chosen so a
          // half of the input pair is never clobbered before it is read.
          if (has_ins_rotr) {
            if (instr->IsShl()) {
              __ Srl(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Ins(dst_high, lhs_high, shift_value, kMipsBitsPerWord - shift_value);
              __ Sll(dst_low, lhs_low, shift_value);
            } else if (instr->IsShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Sra(dst_high, lhs_high, shift_value);
            } else if (instr->IsUShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
            } else {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
              __ Ins(dst_high, lhs_low, kMipsBitsPerWord - shift_value, shift_value);
            }
          } else {
            if (instr->IsShl()) {
              __ Sll(dst_low, lhs_low, shift_value);
              __ Srl(TMP, lhs_low, kMipsBitsPerWord - shift_value);
              __ Sll(dst_high, lhs_high, shift_value);
              __ Or(dst_high, dst_high, TMP);
            } else if (instr->IsShr()) {
              __ Sra(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else if (instr->IsUShr()) {
              __ Srl(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else {
              __ Srl(TMP, lhs_low, shift_value);
              __ Sll(dst_low, lhs_high, kMipsBitsPerWord - shift_value);
              __ Or(dst_low, dst_low, TMP);
              __ Srl(TMP, lhs_high, shift_value);
              __ Sll(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Or(dst_high, dst_high, TMP);
            }
          }
        } else {
          // Large shift (>= 32): everything comes from the opposite half,
          // shifted by (shift_value - 32); the other half is filled with
          // zeros or sign bits.
          const uint32_t shift_value_high = shift_value - kMipsBitsPerWord;
          if (instr->IsShl()) {
            __ Sll(dst_high, lhs_low, shift_value_high);
            __ Move(dst_low, ZERO);
          } else if (instr->IsShr()) {
            __ Sra(dst_low, lhs_high, shift_value_high);
            __ Sra(dst_high, dst_low, kMipsBitsPerWord - 1);
          } else if (instr->IsUShr()) {
            __ Srl(dst_low, lhs_high, shift_value_high);
            __ Move(dst_high, ZERO);
          } else {
            if (shift_value == kMipsBitsPerWord) {
              // 64-bit rotation by 32 is just a swap.
              __ Move(dst_low, lhs_high);
              __ Move(dst_high, lhs_low);
            } else {
              if (has_ins_rotr) {
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Ins(dst_low, lhs_low, kMipsBitsPerWord - shift_value_high, shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Ins(dst_high, lhs_high, kMipsBitsPerWord - shift_value_high, shift_value_high);
              } else {
                __ Sll(TMP, lhs_low, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Or(dst_low, dst_low, TMP);
                __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Or(dst_high, dst_high, TMP);
              }
            }
          }
        }
      } else {
        // Variable shift amount: emit the "< 32" sequence (NOR gives the
        // bitwise complement of the amount for the complementary shift),
        // then test the amount against kMipsBitsPerWord and, if set, patch
        // the result into the ">= 32" form by moving between the halves.
        MipsLabel done;
        if (instr->IsShl()) {
          __ Sllv(dst_low, lhs_low, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Srl(TMP, lhs_low, 1);
          __ Srlv(TMP, TMP, AT);
          __ Sllv(dst_high, lhs_high, rhs_reg);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, ZERO);
        } else if (instr->IsShr()) {
          __ Srav(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Sra(dst_high, dst_high, 31);
        } else if (instr->IsUShr()) {
          __ Srlv(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Move(dst_high, ZERO);
        } else {
          __ Nor(AT, ZERO, rhs_reg);
          __ Srlv(TMP, lhs_low, rhs_reg);
          __ Sll(dst_low, lhs_high, 1);
          __ Sllv(dst_low, dst_low, AT);
          __ Or(dst_low, dst_low, TMP);
          __ Srlv(TMP, lhs_high, rhs_reg);
          __ Sll(dst_high, lhs_low, 1);
          __ Sllv(dst_high, dst_high, AT);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          // Rotation by >= 32: swap the halves through TMP.
          __ Move(TMP, dst_high);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, TMP);
        }
        __ Bind(&done);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2514
// HAdd shares its location constraints with the other binary ops; see HandleBinaryOp.
void LocationsBuilderMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2518
// HAdd shares its code emission with the other binary ops; see HandleBinaryOp.
void InstructionCodeGeneratorMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2522
// HAnd shares its location constraints with the other binary ops; see HandleBinaryOp.
void LocationsBuilderMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2526
// HAnd shares its code emission with the other binary ops; see HandleBinaryOp.
void InstructionCodeGeneratorMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2530
2531void LocationsBuilderMIPS::VisitArrayGet(HArrayGet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002532 Primitive::Type type = instruction->GetType();
2533 bool object_array_get_with_read_barrier =
2534 kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002535 LocationSummary* locations =
Alexey Frunze15958152017-02-09 19:08:30 -08002536 new (GetGraph()->GetArena()) LocationSummary(instruction,
2537 object_array_get_with_read_barrier
2538 ? LocationSummary::kCallOnSlowPath
2539 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07002540 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2541 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
2542 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002543 locations->SetInAt(0, Location::RequiresRegister());
2544 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexey Frunze15958152017-02-09 19:08:30 -08002545 if (Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002546 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2547 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002548 // The output overlaps in the case of an object array get with
2549 // read barriers enabled: we do not want the move to overwrite the
2550 // array's location, as we need it to emit the read barrier.
2551 locations->SetOut(Location::RequiresRegister(),
2552 object_array_get_with_read_barrier
2553 ? Location::kOutputOverlap
2554 : Location::kNoOutputOverlap);
2555 }
2556 // We need a temporary register for the read barrier marking slow
2557 // path in CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier.
2558 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002559 bool temp_needed = instruction->GetIndex()->IsConstant()
2560 ? !kBakerReadBarrierThunksEnableForFields
2561 : !kBakerReadBarrierThunksEnableForArrays;
2562 if (temp_needed) {
2563 locations->AddTemp(Location::RequiresRegister());
2564 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002565 }
2566}
2567
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002568static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS* codegen) {
2569 auto null_checker = [codegen, instruction]() {
2570 codegen->MaybeRecordImplicitNullCheck(instruction);
Alexey Frunze2923db72016-08-20 01:55:47 -07002571 };
2572 return null_checker;
2573}
2574
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002575void InstructionCodeGeneratorMIPS::VisitArrayGet(HArrayGet* instruction) {
2576 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002577 Location obj_loc = locations->InAt(0);
2578 Register obj = obj_loc.AsRegister<Register>();
2579 Location out_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002580 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002581 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002582 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002583
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002584 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002585 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2586 instruction->IsStringCharAt();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002587 switch (type) {
2588 case Primitive::kPrimBoolean: {
Alexey Frunze15958152017-02-09 19:08:30 -08002589 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002590 if (index.IsConstant()) {
2591 size_t offset =
2592 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002593 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002594 } else {
2595 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002596 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002597 }
2598 break;
2599 }
2600
2601 case Primitive::kPrimByte: {
Alexey Frunze15958152017-02-09 19:08:30 -08002602 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002603 if (index.IsConstant()) {
2604 size_t offset =
2605 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002606 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002607 } else {
2608 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002609 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002610 }
2611 break;
2612 }
2613
2614 case Primitive::kPrimShort: {
Alexey Frunze15958152017-02-09 19:08:30 -08002615 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002616 if (index.IsConstant()) {
2617 size_t offset =
2618 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002619 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002620 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002621 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_2, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002622 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002623 }
2624 break;
2625 }
2626
2627 case Primitive::kPrimChar: {
Alexey Frunze15958152017-02-09 19:08:30 -08002628 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002629 if (maybe_compressed_char_at) {
2630 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2631 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
2632 __ Sll(TMP, TMP, 31); // Extract compression flag into the most significant bit of TMP.
2633 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2634 "Expecting 0=compressed, 1=uncompressed");
2635 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002636 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002637 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2638 if (maybe_compressed_char_at) {
2639 MipsLabel uncompressed_load, done;
2640 __ Bnez(TMP, &uncompressed_load);
2641 __ LoadFromOffset(kLoadUnsignedByte,
2642 out,
2643 obj,
2644 data_offset + (const_index << TIMES_1));
2645 __ B(&done);
2646 __ Bind(&uncompressed_load);
2647 __ LoadFromOffset(kLoadUnsignedHalfword,
2648 out,
2649 obj,
2650 data_offset + (const_index << TIMES_2));
2651 __ Bind(&done);
2652 } else {
2653 __ LoadFromOffset(kLoadUnsignedHalfword,
2654 out,
2655 obj,
2656 data_offset + (const_index << TIMES_2),
2657 null_checker);
2658 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002659 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002660 Register index_reg = index.AsRegister<Register>();
2661 if (maybe_compressed_char_at) {
2662 MipsLabel uncompressed_load, done;
2663 __ Bnez(TMP, &uncompressed_load);
2664 __ Addu(TMP, obj, index_reg);
2665 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2666 __ B(&done);
2667 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002668 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002669 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2670 __ Bind(&done);
2671 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002672 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002673 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
2674 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002675 }
2676 break;
2677 }
2678
Alexey Frunze15958152017-02-09 19:08:30 -08002679 case Primitive::kPrimInt: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002680 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002681 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002682 if (index.IsConstant()) {
2683 size_t offset =
2684 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002685 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002686 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002687 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002688 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002689 }
2690 break;
2691 }
2692
Alexey Frunze15958152017-02-09 19:08:30 -08002693 case Primitive::kPrimNot: {
2694 static_assert(
2695 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2696 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2697 // /* HeapReference<Object> */ out =
2698 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2699 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002700 bool temp_needed = index.IsConstant()
2701 ? !kBakerReadBarrierThunksEnableForFields
2702 : !kBakerReadBarrierThunksEnableForArrays;
2703 Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze15958152017-02-09 19:08:30 -08002704 // Note that a potential implicit null check is handled in this
2705 // CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier call.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002706 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
2707 if (index.IsConstant()) {
2708 // Array load with a constant index can be treated as a field load.
2709 size_t offset =
2710 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2711 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2712 out_loc,
2713 obj,
2714 offset,
2715 temp,
2716 /* needs_null_check */ false);
2717 } else {
2718 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2719 out_loc,
2720 obj,
2721 data_offset,
2722 index,
2723 temp,
2724 /* needs_null_check */ false);
2725 }
Alexey Frunze15958152017-02-09 19:08:30 -08002726 } else {
2727 Register out = out_loc.AsRegister<Register>();
2728 if (index.IsConstant()) {
2729 size_t offset =
2730 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2731 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
2732 // If read barriers are enabled, emit read barriers other than
2733 // Baker's using a slow path (and also unpoison the loaded
2734 // reference, if heap poisoning is enabled).
2735 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2736 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002737 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08002738 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
2739 // If read barriers are enabled, emit read barriers other than
2740 // Baker's using a slow path (and also unpoison the loaded
2741 // reference, if heap poisoning is enabled).
2742 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2743 out_loc,
2744 out_loc,
2745 obj_loc,
2746 data_offset,
2747 index);
2748 }
2749 }
2750 break;
2751 }
2752
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002753 case Primitive::kPrimLong: {
Alexey Frunze15958152017-02-09 19:08:30 -08002754 Register out = out_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002755 if (index.IsConstant()) {
2756 size_t offset =
2757 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002758 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002759 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002760 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002761 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002762 }
2763 break;
2764 }
2765
2766 case Primitive::kPrimFloat: {
Alexey Frunze15958152017-02-09 19:08:30 -08002767 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002768 if (index.IsConstant()) {
2769 size_t offset =
2770 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002771 __ LoadSFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002772 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002773 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002774 __ LoadSFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002775 }
2776 break;
2777 }
2778
2779 case Primitive::kPrimDouble: {
Alexey Frunze15958152017-02-09 19:08:30 -08002780 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002781 if (index.IsConstant()) {
2782 size_t offset =
2783 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002784 __ LoadDFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002785 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002786 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002787 __ LoadDFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002788 }
2789 break;
2790 }
2791
2792 case Primitive::kPrimVoid:
2793 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2794 UNREACHABLE();
2795 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002796}
2797
2798void LocationsBuilderMIPS::VisitArrayLength(HArrayLength* instruction) {
2799 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2800 locations->SetInAt(0, Location::RequiresRegister());
2801 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2802}
2803
2804void InstructionCodeGeneratorMIPS::VisitArrayLength(HArrayLength* instruction) {
2805 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002806 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002807 Register obj = locations->InAt(0).AsRegister<Register>();
2808 Register out = locations->Out().AsRegister<Register>();
2809 __ LoadFromOffset(kLoadWord, out, obj, offset);
2810 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002811 // Mask out compression flag from String's array length.
2812 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2813 __ Srl(out, out, 1u);
2814 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002815}
2816
Alexey Frunzef58b2482016-09-02 22:14:06 -07002817Location LocationsBuilderMIPS::RegisterOrZeroConstant(HInstruction* instruction) {
2818 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2819 ? Location::ConstantLocation(instruction->AsConstant())
2820 : Location::RequiresRegister();
2821}
2822
2823Location LocationsBuilderMIPS::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2824 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2825 // We can store a non-zero float or double constant without first loading it into the FPU,
2826 // but we should only prefer this if the constant has a single use.
2827 if (instruction->IsConstant() &&
2828 (instruction->AsConstant()->IsZeroBitPattern() ||
2829 instruction->GetUses().HasExactlyOneElement())) {
2830 return Location::ConstantLocation(instruction->AsConstant());
2831 // Otherwise fall through and require an FPU register for the constant.
2832 }
2833 return Location::RequiresFpuRegister();
2834}
2835
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002836void LocationsBuilderMIPS::VisitArraySet(HArraySet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002837 Primitive::Type value_type = instruction->GetComponentType();
2838
2839 bool needs_write_barrier =
2840 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2841 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2842
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002843 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2844 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002845 may_need_runtime_call_for_type_check ?
2846 LocationSummary::kCallOnSlowPath :
2847 LocationSummary::kNoCall);
2848
2849 locations->SetInAt(0, Location::RequiresRegister());
2850 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2851 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
2852 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002853 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002854 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2855 }
2856 if (needs_write_barrier) {
2857 // Temporary register for the write barrier.
2858 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002859 }
2860}
2861
// Generates code for an array element store.
// The element address is formed as obj + data_offset (folding in the scaled
// index when it is a constant), or computed into TMP (base_reg) with
// Addu/ShiftAndAdd when the index is in a register. Reference stores may
// additionally require a runtime type check (via ArraySetSlowPathMIPS) and
// always mark the GC card for non-null values.
void InstructionCodeGeneratorMIPS::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index the scaled offset is folded into data_offset and we
  // address off `obj` directly; otherwise TMP holds obj + scaled index.
  Register base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 1-byte elements: no scaling needed for a register index.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        __ Addu(base_reg, obj, index.AsRegister<Register>());
      }
      if (value_location.IsConstant()) {
        // Per RegisterOrZeroConstant, a constant core value is a zero bit
        // pattern and is stored without occupying a register.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 2-byte elements: scale a register index by 2 while adding.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_2, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimInt: {
      // 4-byte elements.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      if (value_location.IsConstant()) {
        // Just setting null. No type check and no write barrier needed.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      Register value = value_location.AsRegister<Register>();
      Register temp1 = locations->GetTemp(0).AsRegister<Register>();
      Register temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      MipsLabel done;
      SlowPathCodeMIPS* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Null values never need a type check: store and skip to `done`.
          MipsLabel non_zero;
          __ Bnez(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ B(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers.  This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // For Object[] arrays, a component type of exactly Object also
          // accepts the value (its super class chain ends at Object).
          MipsLabel do_put;
          __ Beq(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnez(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bne(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      Register source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        // With a slow path present, the null check was already covered by
        // the earlier `null_checker`-annotated accesses.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 8-byte elements; the value register pair is addressed via its low half.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegisterPairLow<Register>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        // FP constants may be stored from a core register/immediate path
        // (see FpuRegisterOrConstantForStore).
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreSToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreDToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
3099
3100void LocationsBuilderMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003101 RegisterSet caller_saves = RegisterSet::Empty();
3102 InvokeRuntimeCallingConvention calling_convention;
3103 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3104 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3105 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003106 locations->SetInAt(0, Location::RequiresRegister());
3107 locations->SetInAt(1, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003108}
3109
3110void InstructionCodeGeneratorMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
3111 LocationSummary* locations = instruction->GetLocations();
3112 BoundsCheckSlowPathMIPS* slow_path =
3113 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS(instruction);
3114 codegen_->AddSlowPath(slow_path);
3115
3116 Register index = locations->InAt(0).AsRegister<Register>();
3117 Register length = locations->InAt(1).AsRegister<Register>();
3118
3119 // length is limited by the maximum positive signed 32-bit integer.
3120 // Unsigned comparison of length and index checks for index < 0
3121 // and for length <= index simultaneously.
3122 __ Bgeu(index, length, slow_path->GetEntryLabel());
3123}
3124
Alexey Frunze15958152017-02-09 19:08:30 -08003125// Temp is used for read barrier.
3126static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3127 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07003128 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08003129 (kUseBakerReadBarrier ||
3130 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3131 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3132 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3133 return 1;
3134 }
3135 return 0;
3136}
3137
3138// Extra temp is used for read barrier.
3139static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3140 return 1 + NumberOfInstanceOfTemps(type_check_kind);
3141}
3142
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003143void LocationsBuilderMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003144 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3145 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3146
3147 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3148 switch (type_check_kind) {
3149 case TypeCheckKind::kExactCheck:
3150 case TypeCheckKind::kAbstractClassCheck:
3151 case TypeCheckKind::kClassHierarchyCheck:
3152 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08003153 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003154 ? LocationSummary::kCallOnSlowPath
3155 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
3156 break;
3157 case TypeCheckKind::kArrayCheck:
3158 case TypeCheckKind::kUnresolvedCheck:
3159 case TypeCheckKind::kInterfaceCheck:
3160 call_kind = LocationSummary::kCallOnSlowPath;
3161 break;
3162 }
3163
3164 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003165 locations->SetInAt(0, Location::RequiresRegister());
3166 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08003167 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003168}
3169
3170void InstructionCodeGeneratorMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003171 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003172 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08003173 Location obj_loc = locations->InAt(0);
3174 Register obj = obj_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003175 Register cls = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08003176 Location temp_loc = locations->GetTemp(0);
3177 Register temp = temp_loc.AsRegister<Register>();
3178 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3179 DCHECK_LE(num_temps, 2u);
3180 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003181 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3182 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3183 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3184 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3185 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3186 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3187 const uint32_t object_array_data_offset =
3188 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
3189 MipsLabel done;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003190
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003191 // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
3192 // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
3193 // read barriers is done for performance and code size reasons.
3194 bool is_type_check_slow_path_fatal = false;
3195 if (!kEmitCompilerReadBarrier) {
3196 is_type_check_slow_path_fatal =
3197 (type_check_kind == TypeCheckKind::kExactCheck ||
3198 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3199 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3200 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3201 !instruction->CanThrowIntoCatchBlock();
3202 }
3203 SlowPathCodeMIPS* slow_path =
3204 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
3205 is_type_check_slow_path_fatal);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003206 codegen_->AddSlowPath(slow_path);
3207
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003208 // Avoid this check if we know `obj` is not null.
3209 if (instruction->MustDoNullCheck()) {
3210 __ Beqz(obj, &done);
3211 }
3212
3213 switch (type_check_kind) {
3214 case TypeCheckKind::kExactCheck:
3215 case TypeCheckKind::kArrayCheck: {
3216 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003217 GenerateReferenceLoadTwoRegisters(instruction,
3218 temp_loc,
3219 obj_loc,
3220 class_offset,
3221 maybe_temp2_loc,
3222 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003223 // Jump to slow path for throwing the exception or doing a
3224 // more involved array check.
3225 __ Bne(temp, cls, slow_path->GetEntryLabel());
3226 break;
3227 }
3228
3229 case TypeCheckKind::kAbstractClassCheck: {
3230 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003231 GenerateReferenceLoadTwoRegisters(instruction,
3232 temp_loc,
3233 obj_loc,
3234 class_offset,
3235 maybe_temp2_loc,
3236 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003237 // If the class is abstract, we eagerly fetch the super class of the
3238 // object to avoid doing a comparison we know will fail.
3239 MipsLabel loop;
3240 __ Bind(&loop);
3241 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003242 GenerateReferenceLoadOneRegister(instruction,
3243 temp_loc,
3244 super_offset,
3245 maybe_temp2_loc,
3246 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003247 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3248 // exception.
3249 __ Beqz(temp, slow_path->GetEntryLabel());
3250 // Otherwise, compare the classes.
3251 __ Bne(temp, cls, &loop);
3252 break;
3253 }
3254
3255 case TypeCheckKind::kClassHierarchyCheck: {
3256 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003257 GenerateReferenceLoadTwoRegisters(instruction,
3258 temp_loc,
3259 obj_loc,
3260 class_offset,
3261 maybe_temp2_loc,
3262 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003263 // Walk over the class hierarchy to find a match.
3264 MipsLabel loop;
3265 __ Bind(&loop);
3266 __ Beq(temp, cls, &done);
3267 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003268 GenerateReferenceLoadOneRegister(instruction,
3269 temp_loc,
3270 super_offset,
3271 maybe_temp2_loc,
3272 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003273 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3274 // exception. Otherwise, jump to the beginning of the loop.
3275 __ Bnez(temp, &loop);
3276 __ B(slow_path->GetEntryLabel());
3277 break;
3278 }
3279
3280 case TypeCheckKind::kArrayObjectCheck: {
3281 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003282 GenerateReferenceLoadTwoRegisters(instruction,
3283 temp_loc,
3284 obj_loc,
3285 class_offset,
3286 maybe_temp2_loc,
3287 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003288 // Do an exact check.
3289 __ Beq(temp, cls, &done);
3290 // Otherwise, we need to check that the object's class is a non-primitive array.
3291 // /* HeapReference<Class> */ temp = temp->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08003292 GenerateReferenceLoadOneRegister(instruction,
3293 temp_loc,
3294 component_offset,
3295 maybe_temp2_loc,
3296 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003297 // If the component type is null, jump to the slow path to throw the exception.
3298 __ Beqz(temp, slow_path->GetEntryLabel());
3299 // Otherwise, the object is indeed an array, further check that this component
3300 // type is not a primitive type.
3301 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
3302 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3303 __ Bnez(temp, slow_path->GetEntryLabel());
3304 break;
3305 }
3306
3307 case TypeCheckKind::kUnresolvedCheck:
3308 // We always go into the type check slow path for the unresolved check case.
3309 // We cannot directly call the CheckCast runtime entry point
3310 // without resorting to a type checking slow path here (i.e. by
3311 // calling InvokeRuntime directly), as it would require to
3312 // assign fixed registers for the inputs of this HInstanceOf
3313 // instruction (following the runtime calling convention), which
3314 // might be cluttered by the potential first read barrier
3315 // emission at the beginning of this method.
3316 __ B(slow_path->GetEntryLabel());
3317 break;
3318
3319 case TypeCheckKind::kInterfaceCheck: {
3320 // Avoid read barriers to improve performance of the fast path. We can not get false
3321 // positives by doing this.
3322 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003323 GenerateReferenceLoadTwoRegisters(instruction,
3324 temp_loc,
3325 obj_loc,
3326 class_offset,
3327 maybe_temp2_loc,
3328 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003329 // /* HeapReference<Class> */ temp = temp->iftable_
Alexey Frunze15958152017-02-09 19:08:30 -08003330 GenerateReferenceLoadTwoRegisters(instruction,
3331 temp_loc,
3332 temp_loc,
3333 iftable_offset,
3334 maybe_temp2_loc,
3335 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003336 // Iftable is never null.
3337 __ Lw(TMP, temp, array_length_offset);
3338 // Loop through the iftable and check if any class matches.
3339 MipsLabel loop;
3340 __ Bind(&loop);
3341 __ Addiu(temp, temp, 2 * kHeapReferenceSize); // Possibly in delay slot on R2.
3342 __ Beqz(TMP, slow_path->GetEntryLabel());
3343 __ Lw(AT, temp, object_array_data_offset - 2 * kHeapReferenceSize);
3344 __ MaybeUnpoisonHeapReference(AT);
3345 // Go to next interface.
3346 __ Addiu(TMP, TMP, -2);
3347 // Compare the classes and continue the loop if they do not match.
3348 __ Bne(AT, cls, &loop);
3349 break;
3350 }
3351 }
3352
3353 __ Bind(&done);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003354 __ Bind(slow_path->GetExitLabel());
3355}
3356
3357void LocationsBuilderMIPS::VisitClinitCheck(HClinitCheck* check) {
3358 LocationSummary* locations =
3359 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3360 locations->SetInAt(0, Location::RequiresRegister());
3361 if (check->HasUses()) {
3362 locations->SetOut(Location::SameAsFirstInput());
3363 }
3364}
3365
void InstructionCodeGeneratorMIPS::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  // NOTE(review): the trailing `true` argument presumably requests running the
  // class initializer on the slow path -- confirm against LoadClassSlowPathMIPS.
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
      check->GetLoadClass(),
      check,
      check->GetDexPc(),
      true);
  codegen_->AddSlowPath(slow_path);
  // Branch to the slow path when the class (input 0) still needs initialization.
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<Register>());
}
3377
// Allocates locations for HCompare: both operands in registers (core for
// integral types, FPU for float/double), result always in a core register.
void LocationsBuilderMIPS::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);

  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      // Output overlaps because it is written before doing the low comparison.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      // The comparison result (-1/0/1) is an integer, hence a core register.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}
3413
// Emits code for HCompare, producing -1, 0 or 1 in the output register.
// Integral types use slt-based sequences; long compares high words first and
// falls through to an unsigned low-word compare only on equality; float/double
// use cmp.cond.fmt + bc1nez on R6 and c.cond.fmt condition code 0 with
// movt-based conditional moves on R2, honoring the instruction's gt/lt bias
// for NaN operands.
void InstructionCodeGeneratorMIPS::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register res = locations->Out().AsRegister<Register>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  //  0 if: left == right
  //  1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();
      // res = (rhs < lhs) - (lhs < rhs), i.e. -1/0/1.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }
    case Primitive::kPrimLong: {
      MipsLabel done;
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
      // TODO: more efficient (direct) comparison with a constant.
      // Signed compare of the high words first.
      __ Slt(TMP, lhs_high, rhs_high);
      __ Slt(AT, rhs_high, lhs_high);  // Inverted: is actually gt.
      __ Subu(res, AT, TMP);  // Result -1:1:0 for [ <, >, == ].
      __ Bnez(res, &done);  // If we compared ==, check if lower bits are also equal.
      // High words equal: result is decided by an unsigned low-word compare.
      __ Sltu(TMP, lhs_low, rhs_low);
      __ Sltu(AT, rhs_low, lhs_low);  // Inverted: is actually gt.
      __ Subu(res, AT, TMP);  // Result -1:1:0 for [ <, >, == ].
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimFloat: {
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        // R6: compare into FTMP and branch on its result.
        __ CmpEqS(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          // NaN compares neither == nor <, so it falls through to +1.
          __ CmpLtS(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, 1);
        } else {
          // lt bias: NaN falls through to -1.
          __ CmpLtS(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, -1);
        }
      } else {
        // R2: use FP condition code 0 and conditional moves.
        if (gt_bias) {
          __ ColtS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);  // res = 0 if equal; stays 1 otherwise (incl. NaN).
        } else {
          __ ColtS(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);  // res = 0 if equal; stays -1 otherwise (incl. NaN).
        }
      }
      __ Bind(&done);
      break;
    }
    case Primitive::kPrimDouble: {
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        // Same structure as the float case, with double-precision compares.
        __ CmpEqD(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, 1);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, -1);
        }
      } else {
        if (gt_bias) {
          __ ColtD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);
        } else {
          __ ColtD(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);
        }
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3539
// Allocates locations for HCondition nodes. Integral (the `default` label)
// and long conditions take a register plus a register-or-constant; FP
// conditions take two FPU registers. A materialized result register is
// allocated only when the condition is not emitted at its use site.
void LocationsBuilderMIPS::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  switch (instruction->InputAt(0)->GetType()) {
    default:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      break;
  }
  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
3559
// Materializes an HCondition into a register, dispatching to the
// type-specific compare generator. Does nothing when the condition is
// folded into its user (e.g. a branch) instead of being materialized.
void InstructionCodeGeneratorMIPS::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  Primitive::Type type = instruction->InputAt(0)->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    default:
      // Integer case.
      GenerateIntCompare(instruction->GetCondition(), locations);
      return;

    case Primitive::kPrimLong:
      GenerateLongCompare(instruction->GetCondition(), locations);
      return;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // FP compares additionally need the gt/lt NaN bias.
      GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
      return;
  }
}
3584
Alexey Frunze7e99e052015-11-24 19:28:01 -08003585void InstructionCodeGeneratorMIPS::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3586 DCHECK(instruction->IsDiv() || instruction->IsRem());
3587 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3588
3589 LocationSummary* locations = instruction->GetLocations();
3590 Location second = locations->InAt(1);
3591 DCHECK(second.IsConstant());
3592
3593 Register out = locations->Out().AsRegister<Register>();
3594 Register dividend = locations->InAt(0).AsRegister<Register>();
3595 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3596 DCHECK(imm == 1 || imm == -1);
3597
3598 if (instruction->IsRem()) {
3599 __ Move(out, ZERO);
3600 } else {
3601 if (imm == -1) {
3602 __ Subu(out, ZERO, dividend);
3603 } else if (out != dividend) {
3604 __ Move(out, dividend);
3605 }
3606 }
3607}
3608
// Emits code for an integer HDiv/HRem whose constant divisor has a
// power-of-two absolute value (|imm| = 2^ctz_imm, |imm| >= 2). Shift-based:
// a bias of (|imm| - 1) is added to negative dividends so the arithmetic
// right shift rounds toward zero, matching Java division semantics.
void InstructionCodeGeneratorMIPS::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (ctz_imm == 1) {
      // Fast path for division by +/-2, which is very common.
      // The bias is just the sign bit.
      __ Srl(TMP, dividend, 31);
    } else {
      // TMP = (dividend < 0) ? abs_imm - 1 : 0.
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
    }
    __ Addu(out, dividend, TMP);
    __ Sra(out, out, ctz_imm);
    if (imm < 0) {
      // Negative divisor: negate the quotient.
      __ Subu(out, ZERO, out);
    }
  } else {
    if (ctz_imm == 1) {
      // Fast path for modulo +/-2, which is very common.
      __ Sra(TMP, dividend, 31);
      __ Subu(out, dividend, TMP);
      __ Andi(out, out, 1);
      __ Addu(out, out, TMP);
    } else {
      // Bias, mask off the low bits, then remove the bias again.
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
      __ Addu(out, dividend, TMP);
      if (IsUint<16>(abs_imm - 1)) {
        __ Andi(out, out, abs_imm - 1);
      } else {
        // Mask does not fit in the 16-bit andi immediate; use shifts instead.
        __ Sll(out, out, 32 - ctz_imm);
        __ Srl(out, out, 32 - ctz_imm);
      }
      __ Subu(out, out, TMP);
    }
  }
}
3657
// Emits code for an integer HDiv/HRem with an arbitrary non-trivial constant
// divisor, using the magic-number multiplication technique (constants from
// CalculateMagicAndShiftForDivRem): multiply by the magic, take the high
// 32 bits, apply the sign corrections, shift, and add back the sign bit.
// For HRem the remainder is reconstructed as dividend - quotient * imm.
void InstructionCodeGeneratorMIPS::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // TMP = high 32 bits of (dividend * magic).
  __ LoadConst32(TMP, magic);
  if (isR6) {
    __ MuhR6(TMP, dividend, TMP);
  } else {
    __ MultR2(dividend, TMP);
    __ Mfhi(TMP);
  }
  // Correct for the sign of the magic constant.
  if (imm > 0 && magic < 0) {
    __ Addu(TMP, TMP, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Subu(TMP, TMP, dividend);
  }

  if (shift != 0) {
    __ Sra(TMP, TMP, shift);
  }

  if (instruction->IsDiv()) {
    // out = TMP + (TMP < 0 ? 1 : 0): round the quotient toward zero.
    __ Sra(out, TMP, 31);
    __ Subu(out, TMP, out);
  } else {
    // AT = quotient; out = dividend - quotient * imm.
    __ Sra(AT, TMP, 31);
    __ Subu(AT, TMP, AT);
    __ LoadConst32(TMP, imm);
    if (isR6) {
      __ MulR6(TMP, AT, TMP);
    } else {
      __ MulR2(TMP, AT, TMP);
    }
    __ Subu(out, dividend, TMP);
  }
}
3708
// Common code generation for integer HDiv/HRem: dispatches constant divisors
// to the specialized helpers (+/-1, power of two, magic-number multiply) and
// register divisors to the hardware div/mod instructions (R6) or the
// div/mfhi-mflo sequences (R2).
void InstructionCodeGeneratorMIPS::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = locations->InAt(0).AsRegister<Register>();
    Register divisor = second.AsRegister<Register>();
    bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
    if (instruction->IsDiv()) {
      if (isR6) {
        __ DivR6(out, dividend, divisor);
      } else {
        __ DivR2(out, dividend, divisor);
      }
    } else {
      if (isR6) {
        __ ModR6(out, dividend, divisor);
      } else {
        __ ModR2(out, dividend, divisor);
      }
    }
  }
}
3748
// Allocates locations for HDiv. Long division is done via a runtime call
// (kQuickLdiv), so its inputs/output follow the runtime calling convention;
// int and FP divisions are generated inline.
void LocationsBuilderMIPS::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  LocationSummary::CallKind call_kind = (type == Primitive::kPrimLong)
      ? LocationSummary::kCallOnMainOnly
      : LocationSummary::kNoCall;

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      // Constant divisors are handled by specialized inline sequences.
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimLong: {
      // 64-bit operands occupy two argument registers each.
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}
3785
// Emits code for HDiv: inline integral division, a runtime call for long,
// and single FPU instructions for float/double.
void InstructionCodeGeneratorMIPS::VisitDiv(HDiv* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
      GenerateDivRemIntegral(instruction);
      break;
    case Primitive::kPrimLong: {
      // 64-bit division is delegated to the quick runtime entrypoint.
      codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ DivS(dst, lhs, rhs);
      } else {
        __ DivD(dst, lhs, rhs);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}
3815
// Allocates locations for HDivZeroCheck; the value may be a register or a
// constant (constant zero branches unconditionally to the slow path).
void LocationsBuilderMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}
3820
// Emits the divisor-is-zero test: branches to a throwing slow path when the
// value is zero. For long values the two halves are OR-ed together first.
void InstructionCodeGeneratorMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsConstant()) {
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Constant zero divisor: always throw.
          __ B(slow_path->GetEntryLabel());
        } else {
          // A division by a non-null constant is valid. We don't need to perform
          // any check, so simply fall through.
        }
      } else {
        DCHECK(value.IsRegister()) << value;
        __ Beqz(value.AsRegister<Register>(), slow_path->GetEntryLabel());
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ B(slow_path->GetEntryLabel());
        } else {
          // A division by a non-null constant is valid. We don't need to perform
          // any check, so simply fall through.
        }
      } else {
        DCHECK(value.IsRegisterPair()) << value;
        // The 64-bit value is zero iff both 32-bit halves are zero.
        __ Or(TMP, value.AsRegisterPairHigh<Register>(), value.AsRegisterPairLow<Register>());
        __ Beqz(TMP, slow_path->GetEntryLabel());
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
  }
}
3865
// A double constant needs no code; its location is the constant itself.
void LocationsBuilderMIPS::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3871
void InstructionCodeGeneratorMIPS::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3875
// The exit block needs no locations.
void LocationsBuilderMIPS::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
3879
// The exit block generates no code.
void InstructionCodeGeneratorMIPS::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
3882
// A float constant needs no code; its location is the constant itself.
void LocationsBuilderMIPS::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3888
void InstructionCodeGeneratorMIPS::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3892
// An unconditional branch needs no locations.
void LocationsBuilderMIPS::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
3896
// Emits the control transfer shared by HGoto and HTryBoundary: handles a
// suspend check on loop back edges (and after the entry block), then emits a
// branch only when the successor is not the next block in the linear order.
void InstructionCodeGeneratorMIPS::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: the suspend check subsumes the branch.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}
3915
void InstructionCodeGeneratorMIPS::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
3919
// A try boundary needs no locations.
void LocationsBuilderMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
3923
// Branches to the normal-flow successor, unless it is the exit block
// (in which case control reaches the exit via the return path instead).
void InstructionCodeGeneratorMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
3930
// Materializes a 32-bit integer condition `cond` into the output register
// (0 or 1). MIPS only has slt/sltu/slti/sltiu, so the other relations are
// synthesized by swapping operands and/or inverting the result with xori,
// with immediate-operand fast paths where the constant fits the 16-bit
// instruction fields.
void InstructionCodeGeneratorMIPS::GenerateIntCompare(IfCondition cond,
                                                      LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // Compute a zero/non-zero difference, then normalize to 0/1.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          __ Addiu(dst, lhs, -rhs_imm);
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst32(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm + 1);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm + 1);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
4076
// Emits code leaving the result of an int32 comparison in `dst`.
// MIPS only provides "set on less than" (slt/sltu and the immediate forms),
// so >=, <=, > etc. are synthesized from < and an inversion. The return
// value tells the caller whether `dst` currently holds the *negation* of
// `cond` and must still be inverted.
// NOTE(review): for kCondEQ/kCondNE, `dst` holds a zero/non-zero value
// (lhs - rhs or lhs ^ rhs), not a normalized 0/1 — callers are expected to
// normalize it themselves; confirm at call sites.
bool InstructionCodeGeneratorMIPS::MaterializeIntCompare(IfCondition cond,
                                                         LocationSummary* input_locations,
                                                         Register dst) {
  Register lhs = input_locations->InAt(0).AsRegister<Register>();
  Location rhs_location = input_locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // dst becomes zero iff lhs == rhs; prefer a single immediate
      // instruction (addiu/xori) when the constant allows it, otherwise
      // materialize the constant in TMP and use xor.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        __ Addiu(dst, lhs, -rhs_imm);
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      // lhs >= rhs is !(lhs < rhs): caller must invert for kCondGE.
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm + 1);
        return (cond == kCondGT);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        // lhs <= rhs is !(rhs < lhs): caller must invert for kCondLE.
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm + 1);
        return (cond == kCondA);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        // lhs <=u rhs is !(rhs <u lhs): caller must invert for kCondBE.
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
4174
// Emits an int32 compare-and-branch to `label`.
// Three strategies, in order of preference:
//   1. rhs == 0: use the zero-compare branches (beqz/bnez/bltz/...).
//   2. R6, or rhs in a register: use the assembler's two-register
//      pseudo-branches (Blt/Bge/Bltu/... handle operand swapping).
//   3. R2 with a constant rhs: materialize a 0/1 flag via slti/sltiu when
//      the constant fits in 16 bits, then branch on the flag; otherwise
//      load the constant into TMP and fall back to a register compare.
void InstructionCodeGeneratorMIPS::GenerateIntCompareAndBranch(IfCondition cond,
                                                               LocationSummary* locations,
                                                               MipsLabel* label) {
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  if (use_imm && rhs_imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqz(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnez(lhs, label);
        break;
      case kCondLT:
        __ Bltz(lhs, label);
        break;
      case kCondGE:
        __ Bgez(lhs, label);
        break;
      case kCondLE:
        __ Blez(lhs, label);
        break;
      case kCondGT:
        __ Bgtz(lhs, label);
        break;
      case kCondB:  // always false (nothing is unsigned-below 0)
        break;
      case kCondAE:  // always true (everything is unsigned >= 0)
        __ B(label);
        break;
    }
  } else {
    bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
    if (isR6 || !use_imm) {
      if (use_imm) {
        rhs_reg = TMP;
        __ LoadConst32(rhs_reg, rhs_imm);
      }
      switch (cond) {
        case kCondEQ:
          __ Beq(lhs, rhs_reg, label);
          break;
        case kCondNE:
          __ Bne(lhs, rhs_reg, label);
          break;
        case kCondLT:
          __ Blt(lhs, rhs_reg, label);
          break;
        case kCondGE:
          __ Bge(lhs, rhs_reg, label);
          break;
        case kCondLE:
          // lhs <= rhs  <=>  rhs >= lhs (operands swapped).
          __ Bge(rhs_reg, lhs, label);
          break;
        case kCondGT:
          __ Blt(rhs_reg, lhs, label);
          break;
        case kCondB:
          __ Bltu(lhs, rhs_reg, label);
          break;
        case kCondAE:
          __ Bgeu(lhs, rhs_reg, label);
          break;
        case kCondBE:
          __ Bgeu(rhs_reg, lhs, label);
          break;
        case kCondA:
          __ Bltu(rhs_reg, lhs, label);
          break;
      }
    } else {
      // Special cases for more efficient comparison with constants on R2.
      switch (cond) {
        case kCondEQ:
          __ LoadConst32(TMP, rhs_imm);
          __ Beq(lhs, TMP, label);
          break;
        case kCondNE:
          __ LoadConst32(TMP, rhs_imm);
          __ Bne(lhs, TMP, label);
          break;
        case kCondLT:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(lhs, TMP, label);
          }
          break;
        case kCondGE:
          if (IsInt<16>(rhs_imm)) {
            // Branch on the inverted flag: >= is !(<).
            __ Slti(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(lhs, TMP, label);
          }
          break;
        case kCondLE:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(TMP, lhs, label);
          }
          break;
        case kCondGT:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(TMP, lhs, label);
          }
          break;
        case kCondB:
          if (IsInt<16>(rhs_imm)) {
            // Sltiu sign-extends its immediate; see MaterializeIntCompare
            // for the usable unsigned ranges.
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(lhs, TMP, label);
          }
          break;
        case kCondAE:
          if (IsInt<16>(rhs_imm)) {
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(lhs, TMP, label);
          }
          break;
        case kCondBE:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(TMP, lhs, label);
          }
          break;
        case kCondA:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(TMP, lhs, label);
          }
          break;
      }
    }
  }
}
4351
// Materializes a 64-bit comparison into `dst` (0 or 1) on a 32-bit MIPS
// target, where a long lives in a register pair (lhs_high:lhs_low).
// General scheme for the ordering conditions:
//   lhs < rhs  <=>  (lhs_high < rhs_high)
//                   || (high words equal && lhs_low <u rhs_low)
// The "high words equal && low less" term is computed branchlessly as
// Slt(dst, TMP, dst): it is 1 only when TMP == 0 (rhs_high not less than
// lhs_high) and dst == 1 (low-word unsigned compare), which combined with
// the first Or term is equivalent to "equal highs and low less".
// High words use signed (Slt) or unsigned (Sltu) compare depending on the
// condition; low words always compare unsigned. Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS::GenerateLongCompare(IfCondition cond,
                                                       LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }
  if (use_imm && imm == 0) {
    // Comparisons against zero only need the OR of the halves or the sign
    // of the high word.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Or(dst, lhs_high, lhs_low);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(dst, lhs_high, lhs_low);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
        // Sign bit of the high word decides < 0.
        __ Slt(dst, lhs_high, ZERO);
        break;
      case kCondGE:
        __ Slt(dst, lhs_high, ZERO);
        __ Xori(dst, dst, 1);
        break;
      case kCondLE:
        // lhs <= 0 iff !(sign-extension of high word <u (high|low)),
        // i.e. !(non-negative and non-zero).
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        __ Xori(dst, dst, 1);
        break;
      case kCondGT:
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        break;
      case kCondB:  // always false
        __ Andi(dst, dst, 0);
        break;
      case kCondAE:  // always true
        __ Ori(dst, ZERO, 1);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    switch (cond) {
      case kCondEQ:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        // The low-word compare is scheduled before or after the high-word
        // compares depending on whether dst aliases lhs_low, so the
        // low word is read before dst is overwritten.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, lhs_high, TMP);
        __ Slt(TMP, TMP, lhs_high);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondGE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, TMP, lhs_high);
        __ Slt(TMP, lhs_high, TMP);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        // Same as kCondLT/kCondGE but the high words compare unsigned.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, lhs_high, TMP);
        __ Sltu(TMP, TMP, lhs_high);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, TMP, lhs_high);
        __ Sltu(TMP, lhs_high, TMP);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  } else {
    // Register-register form of the same sequences.
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Slt(TMP, TMP, AT);
        __ Slt(AT, lhs_high, rhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondGE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Slt(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, lhs_high, rhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  }
}
4566
// Emits a 64-bit compare-and-branch to `label` on 32-bit MIPS (long values
// live in register pairs). For ordering conditions the pattern is:
//   - branch immediately if the high words already decide the outcome;
//   - otherwise compute "high words not deciding the other way" and the
//     unsigned low-word compare, and combine them (second branch, or a
//     Beqz on the OR of the two flags for the inverted conditions).
// Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS::GenerateLongCompareAndBranch(IfCondition cond,
                                                                LocationSummary* locations,
                                                                MipsLabel* label) {
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }

  if (use_imm && imm == 0) {
    // Compare against zero: only the OR of the halves or the sign of the
    // high word is needed.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ Bltz(lhs_high, label);
        break;
      case kCondGE:
        __ Bgez(lhs_high, label);
        break;
      case kCondLE:
        // lhs <= 0 iff sign-extension of high word >=u (high|low).
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bgeu(AT, TMP, label);
        break;
      case kCondGT:
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bltu(AT, TMP, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    switch (cond) {
      case kCondEQ:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        // Branch if high word is less; otherwise branch if high words are
        // equal (TMP == 0 after the Slt below) and low word is less.
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        // Branch if high word is greater; otherwise branch when neither
        // "high less" nor "low less" holds (Beqz on the OR of both flags).
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        // Unsigned variants: high words compare with Sltu/Bltu.
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  } else {
    // Register-register form of the same sequences.
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  }
}
4779
// Materializes a float/double comparison into the integer register `dst`
// (0 or 1).
// On R6, CMP.cond.fmt writes an all-ones/all-zeros mask into an FPU
// register (FTMP here); Mfc1 + Andi(1) normalizes it to 0/1. For kCondNE
// the trick is Addiu(dst, dst, 1): 0xffffffff + 1 wraps to 0 (equal) and
// 0 + 1 gives 1 (not equal).
// On pre-R6, C.cond.fmt sets FP condition code 0 and Movf/Movt
// conditionally clears the preloaded 1 in `dst`.
// `gt_bias` selects how NaN operands resolve: with gt_bias the ordered
// compares (ColtS/CmpLtS, ...) are used so NaN yields false for </<=,
// otherwise the unordered variants (CultS/CmpUltS, ...) make NaN yield
// true, matching Java's fcmpg/fcmpl semantics.
void InstructionCodeGeneratorMIPS::GenerateFpCompare(IfCondition cond,
                                                     bool gt_bias,
                                                     Primitive::Type type,
                                                     LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          // All-ones (equal) + 1 wraps to 0; zero (unequal) + 1 gives 1.
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          // lhs > rhs computed as rhs < lhs (operands swapped), with the
          // NaN bias inverted accordingly.
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  }
}
5003
// Pre-R6 only: emits a C.cond.fmt FP comparison that sets FP condition
// code `cc` for the requested condition. Returns true when the emitted
// compare tests the *negation* of `cond` (only kCondNE, implemented as
// c.eq), so the caller must branch/select on the inverted condition code.
// `gt_bias` picks ordered (ColtS/ColeS/...) vs. unordered
// (CultS/CuleS/...) compares, controlling how NaN operands resolve;
// > and >= are synthesized by swapping operands of < and <=.
bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR2(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          int cc) {
  FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CeqS(cc, lhs, rhs);
        return false;
      case kCondNE:
        // No "not equal" compare; emit c.eq and tell the caller to invert.
        __ CeqS(cc, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ ColtS(cc, lhs, rhs);
        } else {
          __ CultS(cc, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ ColeS(cc, lhs, rhs);
        } else {
          __ CuleS(cc, lhs, rhs);
        }
        return false;
      case kCondGT:
        // lhs > rhs computed as rhs < lhs, with the NaN bias inverted.
        if (gt_bias) {
          __ CultS(cc, rhs, lhs);
        } else {
          __ ColtS(cc, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CuleS(cc, rhs, lhs);
        } else {
          __ ColeS(cc, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CeqD(cc, lhs, rhs);
        return false;
      case kCondNE:
        __ CeqD(cc, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ ColtD(cc, lhs, rhs);
        } else {
          __ CultD(cc, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ ColeD(cc, lhs, rhs);
        } else {
          __ CuleD(cc, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CultD(cc, rhs, lhs);
        } else {
          __ ColtD(cc, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CuleD(cc, rhs, lhs);
        } else {
          __ ColeD(cc, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
5095
// R6 only: emits a CMP.cond.fmt FP comparison writing its all-ones /
// all-zeros result mask into the FPU register `dst`. Returns true when the
// emitted compare tests the *negation* of `cond` (only kCondNE, via
// cmp.eq), so the caller must consume `dst` inverted (e.g. bc1eqz instead
// of bc1nez). `gt_bias` selects ordered (CmpLtS/CmpLeS/...) vs. unordered
// (CmpUltS/CmpUleS/...) variants for NaN handling; > and >= swap the
// operands of < and <=.
bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR6(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          FRegister dst) {
  FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
  CHECK(codegen_->GetInstructionSetFeatures().IsR6());
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        // No "not equal" compare; emit cmp.eq and tell the caller to invert.
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        // lhs > rhs computed as rhs < lhs, with the NaN bias inverted.
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
5187
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005188void InstructionCodeGeneratorMIPS::GenerateFpCompareAndBranch(IfCondition cond,
5189 bool gt_bias,
5190 Primitive::Type type,
5191 LocationSummary* locations,
5192 MipsLabel* label) {
5193 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
5194 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
5195 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
5196 if (type == Primitive::kPrimFloat) {
5197 if (isR6) {
5198 switch (cond) {
5199 case kCondEQ:
5200 __ CmpEqS(FTMP, lhs, rhs);
5201 __ Bc1nez(FTMP, label);
5202 break;
5203 case kCondNE:
5204 __ CmpEqS(FTMP, lhs, rhs);
5205 __ Bc1eqz(FTMP, label);
5206 break;
5207 case kCondLT:
5208 if (gt_bias) {
5209 __ CmpLtS(FTMP, lhs, rhs);
5210 } else {
5211 __ CmpUltS(FTMP, lhs, rhs);
5212 }
5213 __ Bc1nez(FTMP, label);
5214 break;
5215 case kCondLE:
5216 if (gt_bias) {
5217 __ CmpLeS(FTMP, lhs, rhs);
5218 } else {
5219 __ CmpUleS(FTMP, lhs, rhs);
5220 }
5221 __ Bc1nez(FTMP, label);
5222 break;
5223 case kCondGT:
5224 if (gt_bias) {
5225 __ CmpUltS(FTMP, rhs, lhs);
5226 } else {
5227 __ CmpLtS(FTMP, rhs, lhs);
5228 }
5229 __ Bc1nez(FTMP, label);
5230 break;
5231 case kCondGE:
5232 if (gt_bias) {
5233 __ CmpUleS(FTMP, rhs, lhs);
5234 } else {
5235 __ CmpLeS(FTMP, rhs, lhs);
5236 }
5237 __ Bc1nez(FTMP, label);
5238 break;
5239 default:
5240 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005241 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005242 }
5243 } else {
5244 switch (cond) {
5245 case kCondEQ:
5246 __ CeqS(0, lhs, rhs);
5247 __ Bc1t(0, label);
5248 break;
5249 case kCondNE:
5250 __ CeqS(0, lhs, rhs);
5251 __ Bc1f(0, label);
5252 break;
5253 case kCondLT:
5254 if (gt_bias) {
5255 __ ColtS(0, lhs, rhs);
5256 } else {
5257 __ CultS(0, lhs, rhs);
5258 }
5259 __ Bc1t(0, label);
5260 break;
5261 case kCondLE:
5262 if (gt_bias) {
5263 __ ColeS(0, lhs, rhs);
5264 } else {
5265 __ CuleS(0, lhs, rhs);
5266 }
5267 __ Bc1t(0, label);
5268 break;
5269 case kCondGT:
5270 if (gt_bias) {
5271 __ CultS(0, rhs, lhs);
5272 } else {
5273 __ ColtS(0, rhs, lhs);
5274 }
5275 __ Bc1t(0, label);
5276 break;
5277 case kCondGE:
5278 if (gt_bias) {
5279 __ CuleS(0, rhs, lhs);
5280 } else {
5281 __ ColeS(0, rhs, lhs);
5282 }
5283 __ Bc1t(0, label);
5284 break;
5285 default:
5286 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005287 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005288 }
5289 }
5290 } else {
5291 DCHECK_EQ(type, Primitive::kPrimDouble);
5292 if (isR6) {
5293 switch (cond) {
5294 case kCondEQ:
5295 __ CmpEqD(FTMP, lhs, rhs);
5296 __ Bc1nez(FTMP, label);
5297 break;
5298 case kCondNE:
5299 __ CmpEqD(FTMP, lhs, rhs);
5300 __ Bc1eqz(FTMP, label);
5301 break;
5302 case kCondLT:
5303 if (gt_bias) {
5304 __ CmpLtD(FTMP, lhs, rhs);
5305 } else {
5306 __ CmpUltD(FTMP, lhs, rhs);
5307 }
5308 __ Bc1nez(FTMP, label);
5309 break;
5310 case kCondLE:
5311 if (gt_bias) {
5312 __ CmpLeD(FTMP, lhs, rhs);
5313 } else {
5314 __ CmpUleD(FTMP, lhs, rhs);
5315 }
5316 __ Bc1nez(FTMP, label);
5317 break;
5318 case kCondGT:
5319 if (gt_bias) {
5320 __ CmpUltD(FTMP, rhs, lhs);
5321 } else {
5322 __ CmpLtD(FTMP, rhs, lhs);
5323 }
5324 __ Bc1nez(FTMP, label);
5325 break;
5326 case kCondGE:
5327 if (gt_bias) {
5328 __ CmpUleD(FTMP, rhs, lhs);
5329 } else {
5330 __ CmpLeD(FTMP, rhs, lhs);
5331 }
5332 __ Bc1nez(FTMP, label);
5333 break;
5334 default:
5335 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005336 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005337 }
5338 } else {
5339 switch (cond) {
5340 case kCondEQ:
5341 __ CeqD(0, lhs, rhs);
5342 __ Bc1t(0, label);
5343 break;
5344 case kCondNE:
5345 __ CeqD(0, lhs, rhs);
5346 __ Bc1f(0, label);
5347 break;
5348 case kCondLT:
5349 if (gt_bias) {
5350 __ ColtD(0, lhs, rhs);
5351 } else {
5352 __ CultD(0, lhs, rhs);
5353 }
5354 __ Bc1t(0, label);
5355 break;
5356 case kCondLE:
5357 if (gt_bias) {
5358 __ ColeD(0, lhs, rhs);
5359 } else {
5360 __ CuleD(0, lhs, rhs);
5361 }
5362 __ Bc1t(0, label);
5363 break;
5364 case kCondGT:
5365 if (gt_bias) {
5366 __ CultD(0, rhs, lhs);
5367 } else {
5368 __ ColtD(0, rhs, lhs);
5369 }
5370 __ Bc1t(0, label);
5371 break;
5372 case kCondGE:
5373 if (gt_bias) {
5374 __ CuleD(0, rhs, lhs);
5375 } else {
5376 __ ColeD(0, rhs, lhs);
5377 }
5378 __ Bc1t(0, label);
5379 break;
5380 default:
5381 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005382 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005383 }
5384 }
5385 }
5386}
5387
// Emits the branch(es) implementing the condition of `instruction` (the input at
// `condition_input_index`). Callers in this file are VisitIf, VisitDeoptimize and
// the non-conditional-move path of VisitSelect. A null `true_target` or
// `false_target` means the corresponding successor falls through and no branch
// to it is emitted.
void InstructionCodeGeneratorMIPS::GenerateTestAndBranch(HInstruction* instruction,
                                                         size_t condition_input_index,
                                                         MipsLabel* true_target,
                                                         MipsLabel* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    // Emit at most one unconditional branch to the taken successor.
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      // Case (1): branch to false_target when the materialized value is zero.
      __ Beqz(cond_val.AsRegister<Register>(), false_target);
    } else {
      // Cases (2) and (3): branch to true_target when it is non-zero.
      __ Bnez(cond_val.AsRegister<Register>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    MipsLabel* branch_target = true_target;

    if (true_target == nullptr) {
      // Case (1): invert the condition and branch to false_target instead.
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    // Dispatch on the type of the comparison's operands, not the HSelect/HIf.
    switch (type) {
      default:
        GenerateIntCompareAndBranch(if_cond, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateLongCompareAndBranch(if_cond, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}
5463
5464void LocationsBuilderMIPS::VisitIf(HIf* if_instr) {
5465 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00005466 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005467 locations->SetInAt(0, Location::RequiresRegister());
5468 }
5469}
5470
5471void InstructionCodeGeneratorMIPS::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00005472 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
5473 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
5474 MipsLabel* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
5475 nullptr : codegen_->GetLabelOf(true_successor);
5476 MipsLabel* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
5477 nullptr : codegen_->GetLabelOf(false_successor);
5478 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005479}
5480
5481void LocationsBuilderMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
5482 LocationSummary* locations = new (GetGraph()->GetArena())
5483 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01005484 InvokeRuntimeCallingConvention calling_convention;
5485 RegisterSet caller_saves = RegisterSet::Empty();
5486 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5487 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00005488 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005489 locations->SetInAt(0, Location::RequiresRegister());
5490 }
5491}
5492
5493void InstructionCodeGeneratorMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08005494 SlowPathCodeMIPS* slow_path =
5495 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00005496 GenerateTestAndBranch(deoptimize,
5497 /* condition_input_index */ 0,
5498 slow_path->GetEntryLabel(),
5499 /* false_target */ nullptr);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005500}
5501
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditional
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
static bool CanMoveConditionally(HSelect* select, bool is_r6, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  // A materialized condition arrives as an int (0/1) in a register.
  Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
  Primitive::Type dst_type = select->GetType();

  // A zero-bit-pattern constant input can be folded into the conditional move
  // (ZERO register on R2, or the zeroing behavior of SELEQZ/SELNEZ on R6).
  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // A constant condition never takes the conditional-move path; it is left to
  // the generic branch-and-move fallback in VisitSelect.
  if (!cond->IsConstant()) {
    // Outer switch: type of the condition. Inner switch: type being moved.
    // The comments inside show the instruction sequence that would be emitted
    // by GenConditionalMoveR2/R6 for the accepted combination.
    switch (cond_type) {
      default:
        switch (dst_type) {
          default:
            // Moving int on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg, false_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg, true_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                // selnez AT, true_reg, cond_reg
                // seleqz TMP, false_reg, cond_reg
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movn out_reg, true_reg/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg_lo, false_reg_lo, cond_reg
                // seleqz out_reg_hi, false_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg_lo, true_reg_lo, cond_reg
                // selnez out_reg_hi, true_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movn out_reg_lo, true_reg_lo/ZERO, cond_reg
              // movn out_reg_hi, true_reg_hi/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on int condition.
            if (is_r6) {
              if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                can_move_conditionally = true;
                if (is_true_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // seleqz.fmt out_reg, false_reg, temp_cond_reg
                  use_const_for_true_in = true;
                } else if (is_false_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // selnez.fmt out_reg, true_reg, temp_cond_reg
                  use_const_for_false_in = true;
                } else {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // sel.fmt temp_cond_reg, false_reg, true_reg
                  // mov.fmt out_reg, temp_cond_reg
                }
              }
            } else {
              // movn.fmt out_reg, true_reg, cond_reg
              can_move_conditionally = true;
            }
            break;
        }
        break;
      case Primitive::kPrimLong:
        // We don't materialize long comparison now
        // and use conditional branches instead.
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        switch (dst_type) {
          default:
            // Moving int on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg, false_reg, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg, true_reg, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else {
                // mfc1 TMP, temp_cond_reg
                // selnez AT, true_reg, TMP
                // seleqz TMP, false_reg, TMP
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movt out_reg, true_reg/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg_lo, false_reg_lo, TMP
                // seleqz out_reg_hi, false_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg_lo, true_reg_lo, TMP
                // selnez out_reg_hi, true_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movt out_reg_lo, true_reg_lo/ZERO, cc
              // movt out_reg_hi, true_reg_hi/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on float/double condition.
            if (is_r6) {
              can_move_conditionally = true;
              if (is_true_value_zero_constant) {
                // seleqz.fmt out_reg, false_reg, temp_cond_reg
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez.fmt out_reg, true_reg, temp_cond_reg
                use_const_for_false_in = true;
              } else {
                // sel.fmt temp_cond_reg, false_reg, true_reg
                // mov.fmt out_reg, temp_cond_reg
              }
            } else {
              // movt.fmt out_reg, true_reg, cc
              can_move_conditionally = true;
            }
            break;
        }
        break;
    }
  }

  // At most one of the two inputs may be folded as a zero constant.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    // Input 0 is the false value, input 1 the true value, input 2 the condition.
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }
    // On R6 we don't require the output to be the same as the
    // first input for conditional moves unlike on R2.
    bool is_out_same_as_first_in = !can_move_conditionally || !is_r6;
    if (is_out_same_as_first_in) {
      locations_to_set->SetOut(Location::SameAsFirstInput());
    } else {
      locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    }
  }

  return can_move_conditionally;
}
5738
// Emits a MIPS32R2 conditional move for HSelect using MOVZ/MOVN (integer
// condition register) or MOVF/MOVT (FP condition code). The register allocator
// placed the output on top of the false input (SameAsFirstInput, checked by the
// DCHECK below), so only the true input may need to be conditionally moved in.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR2(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  // The true input; the false input already occupies the output.
  Location src = locations->InAt(1);
  // ZERO is kept when the true input is the folded zero constant.
  Register src_reg = ZERO;
  Register src_reg_high = ZERO;
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  // FP comparisons on R2 use condition code 0.
  int cond_cc = 0;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Materialized condition: already 0/1 in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the comparison into cond_reg (int) or cond_cc (FP).
    // `cond_inverted` tells us whether the materialized value encodes the
    // opposite of the requested condition.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR2(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               cond_cc);
        break;
    }
  }

  DCHECK(dst.Equals(locations->InAt(0)));
  if (src.IsRegister()) {
    src_reg = src.AsRegister<Register>();
  } else if (src.IsRegisterPair()) {
    src_reg = src.AsRegisterPairLow<Register>();
    src_reg_high = src.AsRegisterPairHigh<Register>();
  } else if (src.IsConstant()) {
    // Only the zero constant is foldable (see CanMoveConditionally);
    // src_reg/src_reg_high stay ZERO.
    DCHECK(src.GetConstant()->IsZeroBitPattern());
  }

  // Outer switch: condition type (selects MOVZ/MOVN vs MOVF/MOVT family).
  // Inner switch: destination type (selects GPR, pair or FPU variant).
  switch (cond_type) {
    default:
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movz(dst.AsRegister<Register>(), src_reg, cond_reg);
          } else {
            __ Movn(dst.AsRegister<Register>(), src_reg, cond_reg);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movz(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movz(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          } else {
            __ Movn(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movn(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovzS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovzD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
      }
      break;
    case Primitive::kPrimLong:
      // Long conditions are never materialized (see CanMoveConditionally).
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movf(dst.AsRegister<Register>(), src_reg, cond_cc);
          } else {
            __ Movt(dst.AsRegister<Register>(), src_reg, cond_cc);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movf(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movf(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          } else {
            __ Movt(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movt(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovfS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovfD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
      }
      break;
  }
}
5860
// Emits a MIPS32R6 conditional move for HSelect using SELEQZ/SELNEZ for
// integer destinations and SELEQZ.fmt/SELNEZ.fmt/SEL.fmt for FP destinations.
// Unlike R2, the output need not coincide with the false input; zero-constant
// inputs are produced implicitly by the zeroing behavior of SELEQZ/SELNEZ.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR6(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  FRegister fcond_reg = FTMP;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Materialized condition: already 0/1 in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the comparison into cond_reg (int) or fcond_reg (FP).
    // `cond_inverted` tells us whether the materialized value encodes the
    // opposite of the requested condition.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR6(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               fcond_reg);
        break;
    }
  }

  // Only the zero constant may appear as a constant input (see CanMoveConditionally).
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer destination: an FP condition must first be moved to a GPR.
      if (Primitive::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        // True input is zero: select the false input or zero.
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False input is zero: select the true input or zero.
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        }
      } else {
        // General case: select each side into AT/TMP and combine with OR.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<Register>(), cond_reg);
        }
        __ Or(dst.AsRegister<Register>(), AT, TMP);
      }
      break;
    case Primitive::kPrimLong: {
      if (Primitive::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      Register dst_lo = dst.AsRegisterPairLow<Register>();
      Register dst_hi = dst.AsRegisterPairHigh<Register>();
      // For long, one input must be the zero constant (see CanMoveConditionally),
      // so two SELEQZ/SELNEZ on the register pair suffice.
      if (true_src.IsConstant()) {
        Register src_lo = false_src.AsRegisterPairLow<Register>();
        Register src_hi = false_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        } else {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        }
      } else {
        DCHECK(false_src.IsConstant());
        Register src_lo = true_src.AsRegisterPairLow<Register>();
        Register src_hi = true_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        } else {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // SEL.S writes its result into the condition register, then copy out.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // SEL.D writes its result into the condition register, then copy out.
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
6034
Goran Jakovljevicc6418422016-12-05 16:31:55 +01006035void LocationsBuilderMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
6036 LocationSummary* locations = new (GetGraph()->GetArena())
6037 LocationSummary(flag, LocationSummary::kNoCall);
6038 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07006039}
6040
Goran Jakovljevicc6418422016-12-05 16:31:55 +01006041void InstructionCodeGeneratorMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
6042 __ LoadFromOffset(kLoadWord,
6043 flag->GetLocations()->Out().AsRegister<Register>(),
6044 SP,
6045 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07006046}
6047
David Brazdil74eb1b22015-12-14 11:44:01 +00006048void LocationsBuilderMIPS::VisitSelect(HSelect* select) {
6049 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexey Frunze674b9ee2016-09-20 14:54:15 -07006050 CanMoveConditionally(select, codegen_->GetInstructionSetFeatures().IsR6(), locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00006051}
6052
6053void InstructionCodeGeneratorMIPS::VisitSelect(HSelect* select) {
Alexey Frunze674b9ee2016-09-20 14:54:15 -07006054 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
6055 if (CanMoveConditionally(select, is_r6, /* locations_to_set */ nullptr)) {
6056 if (is_r6) {
6057 GenConditionalMoveR6(select);
6058 } else {
6059 GenConditionalMoveR2(select);
6060 }
6061 } else {
6062 LocationSummary* locations = select->GetLocations();
6063 MipsLabel false_target;
6064 GenerateTestAndBranch(select,
6065 /* condition_input_index */ 2,
6066 /* true_target */ nullptr,
6067 &false_target);
6068 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
6069 __ Bind(&false_target);
6070 }
David Brazdil74eb1b22015-12-14 11:44:01 +00006071}
6072
void LocationsBuilderMIPS::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No operands: an empty LocationSummary suffices.
  new (GetGraph()->GetArena()) LocationSummary(info);
}
6076
void InstructionCodeGeneratorMIPS::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // Intentionally emits no code.
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
6080
void CodeGeneratorMIPS::GenerateNop() {
  // Emit a single NOP instruction.
  __ Nop();
}
6084
// Sets up the register constraints for an instance/static field get.
// Wide (64-bit) volatile loads go through a runtime call (kCallOnMainOnly);
// object loads with read barriers enabled may take a slow path; everything
// else is a plain load (kNoCall).
void LocationsBuilderMIPS::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
  // Only wide volatile loads need the runtime: 64-bit atomicity cannot be
  // provided by a single plain load here.
  bool generate_volatile = field_info.IsVolatile() && is_wide;
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      generate_volatile
          ? LocationSummary::kCallOnMainOnly
          : (object_field_get_with_read_barrier
              ? LocationSummary::kCallOnSlowPath
              : LocationSummary::kNoCall));

  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0: the object (or class) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (generate_volatile) {
    InvokeRuntimeCallingConvention calling_convention;
    // need A0 to hold base + offset
    locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    if (field_type == Primitive::kPrimLong) {
      locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimLong));
    } else {
      // Use Location::Any() to prevent situations when running out of available fp registers.
      locations->SetOut(Location::Any());
      // Need some temp core regs since FP results are returned in core registers
      Location reg = calling_convention.GetReturnLocation(Primitive::kPrimLong);
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairLow<Register>()));
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairHigh<Register>()));
    }
  } else {
    if (Primitive::IsFloatingPointType(instruction->GetType())) {
      locations->SetOut(Location::RequiresFpuRegister());
    } else {
      // The output overlaps in the case of an object field get with
      // read barriers enabled: we do not want the move to overwrite the
      // object's location, as we need it to emit the read barrier.
      locations->SetOut(Location::RequiresRegister(),
                        object_field_get_with_read_barrier
                            ? Location::kOutputOverlap
                            : Location::kNoOutputOverlap);
    }
    if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
      // We need a temporary register for the read barrier marking slow
      // path in CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier.
      if (!kBakerReadBarrierThunksEnableForFields) {
        locations->AddTemp(Location::RequiresRegister());
      }
    }
  }
}
6138
6139void InstructionCodeGeneratorMIPS::HandleFieldGet(HInstruction* instruction,
6140 const FieldInfo& field_info,
6141 uint32_t dex_pc) {
6142 Primitive::Type type = field_info.GetFieldType();
6143 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08006144 Location obj_loc = locations->InAt(0);
6145 Register obj = obj_loc.AsRegister<Register>();
6146 Location dst_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006147 LoadOperandType load_type = kLoadUnsignedByte;
6148 bool is_volatile = field_info.IsVolatile();
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006149 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Tijana Jakovljevic57433862017-01-17 16:59:03 +01006150 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006151
6152 switch (type) {
6153 case Primitive::kPrimBoolean:
6154 load_type = kLoadUnsignedByte;
6155 break;
6156 case Primitive::kPrimByte:
6157 load_type = kLoadSignedByte;
6158 break;
6159 case Primitive::kPrimShort:
6160 load_type = kLoadSignedHalfword;
6161 break;
6162 case Primitive::kPrimChar:
6163 load_type = kLoadUnsignedHalfword;
6164 break;
6165 case Primitive::kPrimInt:
6166 case Primitive::kPrimFloat:
6167 case Primitive::kPrimNot:
6168 load_type = kLoadWord;
6169 break;
6170 case Primitive::kPrimLong:
6171 case Primitive::kPrimDouble:
6172 load_type = kLoadDoubleword;
6173 break;
6174 case Primitive::kPrimVoid:
6175 LOG(FATAL) << "Unreachable type " << type;
6176 UNREACHABLE();
6177 }
6178
6179 if (is_volatile && load_type == kLoadDoubleword) {
6180 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006181 __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006182 // Do implicit Null check
6183 __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
6184 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
Serban Constantinescufca16662016-07-14 09:21:59 +01006185 codegen_->InvokeRuntime(kQuickA64Load, instruction, dex_pc);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006186 CheckEntrypointTypes<kQuickA64Load, int64_t, volatile const int64_t*>();
6187 if (type == Primitive::kPrimDouble) {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006188 // FP results are returned in core registers. Need to move them.
Alexey Frunze15958152017-02-09 19:08:30 -08006189 if (dst_loc.IsFpuRegister()) {
6190 __ Mtc1(locations->GetTemp(1).AsRegister<Register>(), dst_loc.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006191 __ MoveToFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
Alexey Frunze15958152017-02-09 19:08:30 -08006192 dst_loc.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006193 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006194 DCHECK(dst_loc.IsDoubleStackSlot());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006195 __ StoreToOffset(kStoreWord,
6196 locations->GetTemp(1).AsRegister<Register>(),
6197 SP,
Alexey Frunze15958152017-02-09 19:08:30 -08006198 dst_loc.GetStackIndex());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006199 __ StoreToOffset(kStoreWord,
6200 locations->GetTemp(2).AsRegister<Register>(),
6201 SP,
Alexey Frunze15958152017-02-09 19:08:30 -08006202 dst_loc.GetStackIndex() + 4);
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006203 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006204 }
6205 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006206 if (type == Primitive::kPrimNot) {
6207 // /* HeapReference<Object> */ dst = *(obj + offset)
6208 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006209 Location temp_loc =
6210 kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
Alexey Frunze15958152017-02-09 19:08:30 -08006211 // Note that a potential implicit null check is handled in this
6212 // CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier call.
6213 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6214 dst_loc,
6215 obj,
6216 offset,
6217 temp_loc,
6218 /* needs_null_check */ true);
6219 if (is_volatile) {
6220 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6221 }
6222 } else {
6223 __ LoadFromOffset(kLoadWord, dst_loc.AsRegister<Register>(), obj, offset, null_checker);
6224 if (is_volatile) {
6225 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6226 }
6227 // If read barriers are enabled, emit read barriers other than
6228 // Baker's using a slow path (and also unpoison the loaded
6229 // reference, if heap poisoning is enabled).
6230 codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
6231 }
6232 } else if (!Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006233 Register dst;
6234 if (type == Primitive::kPrimLong) {
Alexey Frunze15958152017-02-09 19:08:30 -08006235 DCHECK(dst_loc.IsRegisterPair());
6236 dst = dst_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006237 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006238 DCHECK(dst_loc.IsRegister());
6239 dst = dst_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006240 }
Alexey Frunze2923db72016-08-20 01:55:47 -07006241 __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006242 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006243 DCHECK(dst_loc.IsFpuRegister());
6244 FRegister dst = dst_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006245 if (type == Primitive::kPrimFloat) {
Alexey Frunze2923db72016-08-20 01:55:47 -07006246 __ LoadSFromOffset(dst, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006247 } else {
Alexey Frunze2923db72016-08-20 01:55:47 -07006248 __ LoadDFromOffset(dst, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006249 }
6250 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006251 }
6252
Alexey Frunze15958152017-02-09 19:08:30 -08006253 // Memory barriers, in the case of references, are handled in the
6254 // previous switch statement.
6255 if (is_volatile && (type != Primitive::kPrimNot)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006256 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6257 }
6258}
6259
6260void LocationsBuilderMIPS::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
6261 Primitive::Type field_type = field_info.GetFieldType();
6262 bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
6263 bool generate_volatile = field_info.IsVolatile() && is_wide;
6264 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006265 instruction, generate_volatile ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006266
6267 locations->SetInAt(0, Location::RequiresRegister());
6268 if (generate_volatile) {
6269 InvokeRuntimeCallingConvention calling_convention;
6270 // need A0 to hold base + offset
6271 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6272 if (field_type == Primitive::kPrimLong) {
6273 locations->SetInAt(1, Location::RegisterPairLocation(
6274 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
6275 } else {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006276 // Use Location::Any() to prevent situations when running out of available fp registers.
6277 locations->SetInAt(1, Location::Any());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006278 // Pass FP parameters in core registers.
6279 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
6280 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
6281 }
6282 } else {
6283 if (Primitive::IsFloatingPointType(field_type)) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006284 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006285 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006286 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006287 }
6288 }
6289}
6290
6291void InstructionCodeGeneratorMIPS::HandleFieldSet(HInstruction* instruction,
6292 const FieldInfo& field_info,
Goran Jakovljevice114da22016-12-26 14:21:43 +01006293 uint32_t dex_pc,
6294 bool value_can_be_null) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006295 Primitive::Type type = field_info.GetFieldType();
6296 LocationSummary* locations = instruction->GetLocations();
6297 Register obj = locations->InAt(0).AsRegister<Register>();
Alexey Frunzef58b2482016-09-02 22:14:06 -07006298 Location value_location = locations->InAt(1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006299 StoreOperandType store_type = kStoreByte;
6300 bool is_volatile = field_info.IsVolatile();
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006301 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Alexey Frunzec061de12017-02-14 13:27:23 -08006302 bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
Tijana Jakovljevic57433862017-01-17 16:59:03 +01006303 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006304
6305 switch (type) {
6306 case Primitive::kPrimBoolean:
6307 case Primitive::kPrimByte:
6308 store_type = kStoreByte;
6309 break;
6310 case Primitive::kPrimShort:
6311 case Primitive::kPrimChar:
6312 store_type = kStoreHalfword;
6313 break;
6314 case Primitive::kPrimInt:
6315 case Primitive::kPrimFloat:
6316 case Primitive::kPrimNot:
6317 store_type = kStoreWord;
6318 break;
6319 case Primitive::kPrimLong:
6320 case Primitive::kPrimDouble:
6321 store_type = kStoreDoubleword;
6322 break;
6323 case Primitive::kPrimVoid:
6324 LOG(FATAL) << "Unreachable type " << type;
6325 UNREACHABLE();
6326 }
6327
6328 if (is_volatile) {
6329 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
6330 }
6331
6332 if (is_volatile && store_type == kStoreDoubleword) {
6333 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006334 __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006335 // Do implicit Null check.
6336 __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
6337 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
6338 if (type == Primitive::kPrimDouble) {
6339 // Pass FP parameters in core registers.
Alexey Frunzef58b2482016-09-02 22:14:06 -07006340 if (value_location.IsFpuRegister()) {
6341 __ Mfc1(locations->GetTemp(1).AsRegister<Register>(),
6342 value_location.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006343 __ MoveFromFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
Alexey Frunzef58b2482016-09-02 22:14:06 -07006344 value_location.AsFpuRegister<FRegister>());
6345 } else if (value_location.IsDoubleStackSlot()) {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006346 __ LoadFromOffset(kLoadWord,
6347 locations->GetTemp(1).AsRegister<Register>(),
6348 SP,
Alexey Frunzef58b2482016-09-02 22:14:06 -07006349 value_location.GetStackIndex());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006350 __ LoadFromOffset(kLoadWord,
6351 locations->GetTemp(2).AsRegister<Register>(),
6352 SP,
Alexey Frunzef58b2482016-09-02 22:14:06 -07006353 value_location.GetStackIndex() + 4);
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006354 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006355 DCHECK(value_location.IsConstant());
6356 DCHECK(value_location.GetConstant()->IsDoubleConstant());
6357 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006358 __ LoadConst64(locations->GetTemp(2).AsRegister<Register>(),
6359 locations->GetTemp(1).AsRegister<Register>(),
6360 value);
6361 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006362 }
Serban Constantinescufca16662016-07-14 09:21:59 +01006363 codegen_->InvokeRuntime(kQuickA64Store, instruction, dex_pc);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006364 CheckEntrypointTypes<kQuickA64Store, void, volatile int64_t *, int64_t>();
6365 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006366 if (value_location.IsConstant()) {
6367 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
6368 __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
6369 } else if (!Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006370 Register src;
6371 if (type == Primitive::kPrimLong) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006372 src = value_location.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006373 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006374 src = value_location.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006375 }
Alexey Frunzec061de12017-02-14 13:27:23 -08006376 if (kPoisonHeapReferences && needs_write_barrier) {
6377 // Note that in the case where `value` is a null reference,
6378 // we do not enter this block, as a null reference does not
6379 // need poisoning.
6380 DCHECK_EQ(type, Primitive::kPrimNot);
6381 __ PoisonHeapReference(TMP, src);
6382 __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
6383 } else {
6384 __ StoreToOffset(store_type, src, obj, offset, null_checker);
6385 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006386 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006387 FRegister src = value_location.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006388 if (type == Primitive::kPrimFloat) {
Alexey Frunze2923db72016-08-20 01:55:47 -07006389 __ StoreSToOffset(src, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006390 } else {
Alexey Frunze2923db72016-08-20 01:55:47 -07006391 __ StoreDToOffset(src, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006392 }
6393 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006394 }
6395
Alexey Frunzec061de12017-02-14 13:27:23 -08006396 if (needs_write_barrier) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006397 Register src = value_location.AsRegister<Register>();
Goran Jakovljevice114da22016-12-26 14:21:43 +01006398 codegen_->MarkGCCard(obj, src, value_can_be_null);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006399 }
6400
6401 if (is_volatile) {
6402 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
6403 }
6404}
6405
6406void LocationsBuilderMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
6407 HandleFieldGet(instruction, instruction->GetFieldInfo());
6408}
6409
6410void InstructionCodeGeneratorMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
6411 HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
6412}
6413
6414void LocationsBuilderMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
6415 HandleFieldSet(instruction, instruction->GetFieldInfo());
6416}
6417
6418void InstructionCodeGeneratorMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Goran Jakovljevice114da22016-12-26 14:21:43 +01006419 HandleFieldSet(instruction,
6420 instruction->GetFieldInfo(),
6421 instruction->GetDexPc(),
6422 instruction->GetValueCanBeNull());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006423}
6424
// Loads the heap reference at `*(out + offset)` back into `out`, applying the
// requested read-barrier scheme. `maybe_temp` must hold a register whenever a
// non-thunk read barrier needs one (checked below); with Baker thunks enabled
// for fields it may be NoLocation().
void InstructionCodeGeneratorMIPS::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  Register out_reg = out.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    // A temp register is required unless the field thunks make it unnecessary.
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<Register>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6463
// Loads the heap reference at `*(obj + offset)` into `out` (a distinct
// register, so `obj` is preserved), applying the requested read-barrier
// scheme. `maybe_temp` must hold a register for the non-thunk Baker path;
// with field thunks enabled it may be NoLocation().
void InstructionCodeGeneratorMIPS::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  Register out_reg = out.AsRegister<Register>();
  Register obj_reg = obj.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // A temp register is required unless the field thunks make it unnecessary.
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6500
// Maps a core register to its ordinal (0..20) among the registers that have
// dedicated Baker read barrier mark introspection thunks. Registers outside
// the supported set are a fatal error.
static inline int GetBakerMarkThunkNumber(Register reg) {
  static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 21, "Expecting equal");
  if (reg >= V0 && reg <= T7) {  // 14 consecutive regs.
    return reg - V0;
  } else if (reg >= S2 && reg <= S7) {  // 6 consecutive regs.
    return 14 + (reg - S2);
  } else if (reg == FP) {  // One more.
    return 20;
  }
  LOG(FATAL) << "Unexpected register " << reg;
  UNREACHABLE();
}
6513
6514static inline int GetBakerMarkFieldArrayThunkDisplacement(Register reg, bool short_offset) {
6515 int num = GetBakerMarkThunkNumber(reg) +
6516 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
6517 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
6518}
6519
6520static inline int GetBakerMarkGcRootThunkDisplacement(Register reg) {
6521 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
6522 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
6523}
6524
// Loads the GC root at `*(obj + offset)` into `root`, applying the requested
// read-barrier scheme (thunk-based Baker, slow-path Baker, non-Baker slow
// path, or none). When `label_low` is non-null, the 16-bit immediate of the
// single load instruction is a 0x5678 placeholder and the label is bound on
// that instruction — NOTE(review): presumably for later PC-relative patching
// by the caller that emitted the matching high half; confirm at call sites.
void InstructionCodeGeneratorMIPS::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                           Location root,
                                                           Register obj,
                                                           uint32_t offset,
                                                           ReadBarrierOption read_barrier_option,
                                                           MipsLabel* label_low) {
  // Assembler reorder state; set before each use under `label_low`/thunk paths.
  bool reordering;
  if (label_low != nullptr) {
    DCHECK_EQ(offset, 0x5678u);
  }
  Register root_reg = root.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        // temp = Thread::Current()->pReadBarrierMarkReg00
        // // AKA &art_quick_read_barrier_mark_introspection.
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // if (temp != nullptr) {
        //   temp = &gc_root_thunk<root_reg>
        //   root = temp(root)
        // }

        bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lw.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        Register base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
        // Disable reordering: branch offsets below are counted in instructions.
        reordering = __ SetReorder(false);
        if (!short_offset) {
          DCHECK(!label_low);
          __ AddUpper(base, obj, offset_high);
        }
        __ Beqz(T9, (isR6 ? 2 : 4));  // Skip jialc / addiu+jalr+nop.
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadWord, root_reg, base, offset_low);  // Single instruction
                                                                   // in delay slot.
        if (isR6) {
          __ Jialc(T9, thunk_disp);
        } else {
          __ Addiu(T9, T9, thunk_disp);
          __ Jalr(T9);
          __ Nop();
        }
        __ SetReorder(reordering);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        // if (temp != null) {
        //   root = temp(root)
        // }

        if (label_low != nullptr) {
          reordering = __ SetReorder(false);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
        if (label_low != nullptr) {
          __ SetReorder(reordering);
        }
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS* slow_path =
            new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, entry_point_offset);
        __ Bnez(temp.AsRegister<Register>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        reordering = __ SetReorder(false);
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Addiu32(root_reg, obj, offset);
      if (label_low != nullptr) {
        __ SetReorder(reordering);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      reordering = __ SetReorder(false);
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
    if (label_low != nullptr) {
      __ SetReorder(reordering);
    }
  }
}
6669
// Emits a field load of `*(obj + offset)` into `ref` with a Baker read
// barrier. With field thunks enabled, the thunk-based fast path is emitted
// inline (and `temp` must be invalid); otherwise the load is delegated to the
// generic GenerateReferenceLoadWithBakerReadBarrier.
void CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                              Location ref,
                                                              Register obj,
                                                              uint32_t offset,
                                                              Location temp,
                                                              bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    // temp = Thread::Current()->pReadBarrierMarkReg00
    // // AKA &art_quick_read_barrier_mark_introspection.
    // if (temp != nullptr) {
    //   temp = &field_array_thunk<holder_reg>
    //   temp()
    // }
    // not_gray_return_address:
    //   // If the offset is too large to fit into the lw instruction, we
    //   // use an adjusted base register (TMP) here. This register
    //   // receives bits 16 ... 31 of the offset before the thunk invocation
    //   // and the thunk benefits from it.
    //   HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    // gray_return_address:

    DCHECK(temp.IsInvalid());
    bool isR6 = GetInstructionSetFeatures().IsR6();
    int16_t offset_low = Low16Bits(offset);
    int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lw.
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    // Disable reordering: branch offsets below are counted in instructions
    // and the instructions placed in delay/forbidden slots must stay put.
    bool reordering = __ SetReorder(false);
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
    // There may have or may have not been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
    Register ref_reg = ref.AsRegister<Register>();
    Register base = short_offset ? obj : TMP;
    if (short_offset) {
      if (isR6) {
        __ Beqzc(T9, 2);  // Skip jialc.
        __ Nop();  // In forbidden slot.
        __ Jialc(T9, thunk_disp);
      } else {
        __ Beqz(T9, 3);  // Skip jalr+nop.
        __ Addiu(T9, T9, thunk_disp);  // In delay slot.
        __ Jalr(T9);
        __ Nop();  // In delay slot.
      }
    } else {
      // Long offset: materialize the high half of the offset in `base` (TMP)
      // on the way to (or around) the thunk call.
      if (isR6) {
        __ Beqz(T9, 2);  // Skip jialc.
        __ Aui(base, obj, offset_high);  // In delay slot.
        __ Jialc(T9, thunk_disp);
      } else {
        __ Lui(base, offset_high);
        __ Beqz(T9, 2);  // Skip jalr.
        __ Addiu(T9, T9, thunk_disp);  // In delay slot.
        __ Jalr(T9);
        __ Addu(base, base, obj);  // In delay slot.
      }
    }
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadWord, ref_reg, base, offset_low);  // Single instruction.
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    __ SetReorder(reordering);
    return;
  }

  // Non-thunk path: treat the field load as an unscaled, unindexed
  // reference load handled by the generic Baker read barrier helper.
  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
6773
// Emits a Baker read barrier for an array element load `ref = obj[index]`.
// When array thunks are enabled, emits an inline fast path that calls a
// per-holder-register mark thunk only if the entrypoint in T9 is non-null;
// otherwise falls back to the generic reference-load read barrier below.
void CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                              Location ref,
                                                              Register obj,
                                                              uint32_t data_offset,
                                                              Location index,
                                                              Location temp,
                                                              bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // Heap references are 32-bit, hence the fixed TIMES_4 element scaling.
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    // Disable assembler reordering: the branch/delay-slot pairs below are
    // scheduled explicitly.
    bool reordering = __ SetReorder(false);
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
    Register ref_reg = ref.AsRegister<Register>();
    // The index may come in as a register pair (Unsafe-style "long offset");
    // only the low half carries data.
    Register index_reg = index.IsRegisterPair()
        ? index.AsRegisterPairLow<Register>()
        : index.AsRegister<Register>();
    if (GetInstructionSetFeatures().IsR6()) {
      __ Beqz(T9, 2);  // Skip jialc.
      __ Lsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
      __ Jialc(T9, thunk_disp);
    } else {
      // Pre-R6 has no LSA: shift first, then add the base in the jalr delay slot.
      __ Sll(TMP, index_reg, scale_factor);
      __ Beqz(T9, 2);  // Skip jalr.
      __ Addiu(T9, T9, thunk_disp);  // In delay slot.
      __ Jalr(T9);
      __ Addu(TMP, TMP, obj);  // In delay slot.
    }
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    __ SetReorder(reordering);
    return;
  }

  // Thunks disabled: use the generic Baker read barrier sequence.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
6859
// Generic Baker read barrier: loads the holder's lock word, issues a load-load
// barrier, performs the actual reference load, and branches to a marking slow
// path if the read barrier state bit indicates the holder is gray.
// `always_update_field` selects the field-updating slow path used by CAS-style
// intrinsics (requires offset == 0 and scale_factor == TIMES_1).
void CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                  Location ref,
                                                                  Register obj,
                                                                  uint32_t offset,
                                                                  Location index,
                                                                  ScaleFactor scale_factor,
                                                                  Location temp,
                                                                  bool needs_null_check,
                                                                  bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Register ref_reg = ref.AsRegister<Register>();
  Register temp_reg = temp.AsRegister<Register>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    // The monitor load above is the faulting instruction for the implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadWord, ref_reg, obj, computed_offset);
    } else {
      // Handle the special case of the
      // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
      // intrinsics, which use a register pair as index ("long
      // offset"), of which only the low part contains data.
      Register index_reg = index.IsRegisterPair()
          ? index.AsRegisterPairLow<Register>()
          : index.AsRegister<Register>();
      __ ShiftAndAdd(TMP, index_reg, obj, scale_factor, TMP);
      __ LoadFromOffset(kLoadWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register pair (of which only the lower half
    // is used). Thus `offset` and `scale_factor` above are expected
    // to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetGraph()->GetArena())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS(instruction,
                                                  ref,
                                                  obj,
                                                  /* field_offset */ index,
                                                  temp_reg);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltz(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6968
6969void CodeGeneratorMIPS::GenerateReadBarrierSlow(HInstruction* instruction,
6970 Location out,
6971 Location ref,
6972 Location obj,
6973 uint32_t offset,
6974 Location index) {
6975 DCHECK(kEmitCompilerReadBarrier);
6976
6977 // Insert a slow path based read barrier *after* the reference load.
6978 //
6979 // If heap poisoning is enabled, the unpoisoning of the loaded
6980 // reference will be carried out by the runtime within the slow
6981 // path.
6982 //
6983 // Note that `ref` currently does not get unpoisoned (when heap
6984 // poisoning is enabled), which is alright as the `ref` argument is
6985 // not used by the artReadBarrierSlow entry point.
6986 //
6987 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6988 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena())
6989 ReadBarrierForHeapReferenceSlowPathMIPS(instruction, out, ref, obj, offset, index);
6990 AddSlowPath(slow_path);
6991
6992 __ B(slow_path->GetEntryLabel());
6993 __ Bind(slow_path->GetExitLabel());
6994}
6995
6996void CodeGeneratorMIPS::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6997 Location out,
6998 Location ref,
6999 Location obj,
7000 uint32_t offset,
7001 Location index) {
7002 if (kEmitCompilerReadBarrier) {
7003 // Baker's read barriers shall be handled by the fast path
7004 // (CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier).
7005 DCHECK(!kUseBakerReadBarrier);
7006 // If heap poisoning is enabled, unpoisoning will be taken care of
7007 // by the runtime within the slow path.
7008 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
7009 } else if (kPoisonHeapReferences) {
7010 __ UnpoisonHeapReference(out.AsRegister<Register>());
7011 }
7012}
7013
7014void CodeGeneratorMIPS::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7015 Location out,
7016 Location root) {
7017 DCHECK(kEmitCompilerReadBarrier);
7018
7019 // Insert a slow path based read barrier *after* the GC root load.
7020 //
7021 // Note that GC roots are not affected by heap poisoning, so we do
7022 // not need to do anything special for this here.
7023 SlowPathCodeMIPS* slow_path =
7024 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS(instruction, out, root);
7025 AddSlowPath(slow_path);
7026
7027 __ B(slow_path->GetEntryLabel());
7028 __ Bind(slow_path->GetExitLabel());
7029}
7030
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007031void LocationsBuilderMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08007032 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
7033 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07007034 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08007035 switch (type_check_kind) {
7036 case TypeCheckKind::kExactCheck:
7037 case TypeCheckKind::kAbstractClassCheck:
7038 case TypeCheckKind::kClassHierarchyCheck:
7039 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08007040 call_kind =
7041 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007042 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08007043 break;
7044 case TypeCheckKind::kArrayCheck:
7045 case TypeCheckKind::kUnresolvedCheck:
7046 case TypeCheckKind::kInterfaceCheck:
7047 call_kind = LocationSummary::kCallOnSlowPath;
7048 break;
7049 }
7050
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007051 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007052 if (baker_read_barrier_slow_path) {
7053 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
7054 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007055 locations->SetInAt(0, Location::RequiresRegister());
7056 locations->SetInAt(1, Location::RequiresRegister());
7057 // The output does overlap inputs.
7058 // Note that TypeCheckSlowPathMIPS uses this register too.
7059 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08007060 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007061}
7062
// Emits the code for HInstanceOf. The result register `out` ends up holding
// 1 when `obj` is an instance of `cls` and 0 otherwise. Simple kinds are
// resolved inline by walking class metadata; array-check, unresolved and
// interface checks defer to TypeCheckSlowPathMIPS.
void InstructionCodeGeneratorMIPS::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  MipsLabel done;
  SlowPathCodeMIPS* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // Xor leaves 0 in `out` on equality; Sltiu(out, out, 1) turns 0 into 1.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      MipsLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqz(out, &done);
      __ Bne(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      MipsLabel loop, success;
      __ Bind(&loop);
      __ Beq(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnez(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      MipsLabel success;
      __ Beq(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqz(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // The component is non-primitive iff its primitive type field is 0.
      __ Sltiu(out, out, 1);
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                     /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bne(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                     /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
7236
7237void LocationsBuilderMIPS::VisitIntConstant(HIntConstant* constant) {
7238 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7239 locations->SetOut(Location::ConstantLocation(constant));
7240}
7241
void InstructionCodeGeneratorMIPS::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site: the locations builder assigned a
  // ConstantLocation, so no instructions are emitted here.
}
7245
7246void LocationsBuilderMIPS::VisitNullConstant(HNullConstant* constant) {
7247 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7248 locations->SetOut(Location::ConstantLocation(constant));
7249}
7250
void InstructionCodeGeneratorMIPS::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site: the locations builder assigned a
  // ConstantLocation, so no instructions are emitted here.
}
7254
7255void LocationsBuilderMIPS::HandleInvoke(HInvoke* invoke) {
7256 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
7257 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
7258}
7259
7260void LocationsBuilderMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
7261 HandleInvoke(invoke);
Alexey Frunze1b8464d2016-11-12 17:22:05 -08007262 // The register T7 is required to be used for the hidden argument in
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007263 // art_quick_imt_conflict_trampoline, so add the hidden argument.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08007264 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T7));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007265}
7266
// Emits an interface call: loads the receiver's class, indexes into the IMT,
// and performs an indirect call through T9 with the method index passed as a
// hidden argument in the second temp register.
void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // Set the hidden argument (the dex method index, expected by the IMT
  // conflict trampoline).
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Reload the receiver from the stack before dereferencing its class.
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  __ LoadFromOffset(kLoadWord, temp, temp,
      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMipsPointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
7308
7309void LocationsBuilderMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen701566a2015-10-27 15:29:13 -07007310 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
7311 if (intrinsic.TryDispatch(invoke)) {
7312 return;
7313 }
7314
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007315 HandleInvoke(invoke);
7316}
7317
7318void LocationsBuilderMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007319 // Explicit clinit checks triggered by static invokes must have been pruned by
7320 // art::PrepareForRegisterAllocation.
7321 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007322
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007323 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
Vladimir Marko65979462017-05-19 17:25:12 +01007324 bool has_extra_input = invoke->HasPcRelativeMethodLoadKind() && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007325
Chris Larsen701566a2015-10-27 15:29:13 -07007326 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
7327 if (intrinsic.TryDispatch(invoke)) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007328 if (invoke->GetLocations()->CanCall() && has_extra_input) {
7329 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
7330 }
Chris Larsen701566a2015-10-27 15:29:13 -07007331 return;
7332 }
7333
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007334 HandleInvoke(invoke);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007335
7336 // Add the extra input register if either the dex cache array base register
7337 // or the PC-relative base register for accessing literals is needed.
7338 if (has_extra_input) {
7339 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
7340 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007341}
7342
void LocationsBuilderMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Polymorphic invokes use the common invoke location setup.
  HandleInvoke(invoke);
}
7346
void InstructionCodeGeneratorMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Delegate to the architecture-independent helper on the code generator.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
7350
Chris Larsen701566a2015-10-27 15:29:13 -07007351static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007352 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen701566a2015-10-27 15:29:13 -07007353 IntrinsicCodeGeneratorMIPS intrinsic(codegen);
7354 intrinsic.Dispatch(invoke);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007355 return true;
7356 }
7357 return false;
7358}
7359
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007360HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
Alexey Frunze06a46c42016-07-19 15:00:40 -07007361 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007362 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007363 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007364 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
Vladimir Markoaad75c62016-10-03 08:46:48 +00007365 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007366 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007367 bool is_r6 = GetInstructionSetFeatures().IsR6();
7368 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007369 switch (desired_string_load_kind) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007370 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007371 case HLoadString::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007372 DCHECK(!Runtime::Current()->UseJitCompilation());
Alexey Frunze06a46c42016-07-19 15:00:40 -07007373 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007374 case HLoadString::LoadKind::kBootImageAddress:
7375 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007376 case HLoadString::LoadKind::kJitTableAddress:
7377 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007378 fallback_load = false;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007379 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007380 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007381 fallback_load = false;
7382 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007383 }
7384 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007385 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007386 }
7387 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007388}
7389
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007390HLoadClass::LoadKind CodeGeneratorMIPS::GetSupportedLoadClassKind(
7391 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007392 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007393 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007394 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7395 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007396 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007397 bool is_r6 = GetInstructionSetFeatures().IsR6();
7398 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007399 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007400 case HLoadClass::LoadKind::kInvalid:
7401 LOG(FATAL) << "UNREACHABLE";
7402 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007403 case HLoadClass::LoadKind::kReferrersClass:
7404 fallback_load = false;
7405 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007406 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007407 case HLoadClass::LoadKind::kBssEntry:
7408 DCHECK(!Runtime::Current()->UseJitCompilation());
7409 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007410 case HLoadClass::LoadKind::kBootImageAddress:
7411 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007412 case HLoadClass::LoadKind::kJitTableAddress:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007413 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007414 fallback_load = false;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007415 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007416 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007417 fallback_load = false;
7418 break;
7419 }
7420 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007421 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007422 }
7423 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007424}
7425
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007426Register CodeGeneratorMIPS::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
7427 Register temp) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007428 CHECK(!GetInstructionSetFeatures().IsR6());
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007429 CHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
7430 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
7431 if (!invoke->GetLocations()->Intrinsified()) {
7432 return location.AsRegister<Register>();
7433 }
7434 // For intrinsics we allow any location, so it may be on the stack.
7435 if (!location.IsRegister()) {
7436 __ LoadFromOffset(kLoadWord, temp, SP, location.GetStackIndex());
7437 return temp;
7438 }
7439 // For register locations, check if the register was saved. If so, get it from the stack.
7440 // Note: There is a chance that the register was saved but not overwritten, so we could
7441 // save one load. However, since this is just an intrinsic slow path we prefer this
7442 // simple and more robust approach rather that trying to determine if that's the case.
7443 SlowPathCode* slow_path = GetCurrentSlowPath();
7444 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
7445 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
7446 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
7447 __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
7448 return temp;
7449 }
7450 return location.AsRegister<Register>();
7451}
7452
Vladimir Markodc151b22015-10-15 18:02:30 +01007453HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS::GetSupportedInvokeStaticOrDirectDispatch(
7454 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01007455 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007456 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007457 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007458 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007459 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7460 // with irreducible loops.
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007461 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007462 bool is_r6 = GetInstructionSetFeatures().IsR6();
7463 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007464 switch (dispatch_info.method_load_kind) {
Vladimir Marko65979462017-05-19 17:25:12 +01007465 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007466 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007467 break;
Vladimir Markodc151b22015-10-15 18:02:30 +01007468 default:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007469 fallback_load = false;
Vladimir Markodc151b22015-10-15 18:02:30 +01007470 break;
7471 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007472 if (fallback_load) {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007473 dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007474 dispatch_info.method_load_data = 0;
7475 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007476 return dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01007477}
7478
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007479void CodeGeneratorMIPS::GenerateStaticOrDirectCall(
7480 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007481 // All registers are assumed to be correctly set up per the calling convention.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007482 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007483 HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
7484 HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007485 bool is_r6 = GetInstructionSetFeatures().IsR6();
Vladimir Marko65979462017-05-19 17:25:12 +01007486 Register base_reg = (invoke->HasPcRelativeMethodLoadKind() && !is_r6)
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007487 ? GetInvokeStaticOrDirectExtraParameter(invoke, temp.AsRegister<Register>())
7488 : ZERO;
7489
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007490 switch (method_load_kind) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007491 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007492 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007493 uint32_t offset =
7494 GetThreadOffset<kMipsPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007495 __ LoadFromOffset(kLoadWord,
7496 temp.AsRegister<Register>(),
7497 TR,
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007498 offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007499 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007500 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007501 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00007502 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007503 break;
Vladimir Marko65979462017-05-19 17:25:12 +01007504 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
7505 DCHECK(GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007506 PcRelativePatchInfo* info_high = NewPcRelativeMethodPatch(invoke->GetTargetMethod());
7507 PcRelativePatchInfo* info_low =
7508 NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
Vladimir Marko65979462017-05-19 17:25:12 +01007509 bool reordering = __ SetReorder(false);
7510 Register temp_reg = temp.AsRegister<Register>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007511 EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg, info_low);
Alexey Frunze6079dca2017-05-28 19:10:28 -07007512 __ Addiu(temp_reg, TMP, /* placeholder */ 0x5678);
Vladimir Marko65979462017-05-19 17:25:12 +01007513 __ SetReorder(reordering);
7514 break;
7515 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007516 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
7517 __ LoadConst32(temp.AsRegister<Register>(), invoke->GetMethodAddress());
7518 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007519 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007520 PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007521 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007522 PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
7523 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007524 Register temp_reg = temp.AsRegister<Register>();
7525 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007526 EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg, info_low);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007527 __ Lw(temp_reg, TMP, /* placeholder */ 0x5678);
7528 __ SetReorder(reordering);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007529 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007530 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007531 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
7532 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
7533 return; // No code pointer retrieval; the runtime performs the call directly.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007534 }
7535 }
7536
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007537 switch (code_ptr_location) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007538 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007539 __ Bal(&frame_entry_label_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007540 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007541 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
7542 // T9 = callee_method->entry_point_from_quick_compiled_code_;
Goran Jakovljevic1a878372015-10-26 14:28:52 +01007543 __ LoadFromOffset(kLoadWord,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007544 T9,
7545 callee_method.AsRegister<Register>(),
7546 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07007547 kMipsPointerSize).Int32Value());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007548 // T9()
7549 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07007550 __ NopIfNoReordering();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007551 break;
7552 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007553 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
7554
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007555 DCHECK(!IsLeafMethod());
7556}
7557
7558void InstructionCodeGeneratorMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007559 // Explicit clinit checks triggered by static invokes must have been pruned by
7560 // art::PrepareForRegisterAllocation.
7561 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007562
7563 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7564 return;
7565 }
7566
7567 LocationSummary* locations = invoke->GetLocations();
7568 codegen_->GenerateStaticOrDirectCall(invoke,
7569 locations->HasTemps()
7570 ? locations->GetTemp(0)
7571 : Location::NoLocation());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007572}
7573
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod from the embedded vtable slot, and jumps through T9.
void CodeGeneratorMIPS::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  Register temp = temp_location.AsRegister<Register>();
  // Byte offset of the vtable slot for this invoke's method index.
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMipsPointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadWord, temp, receiver, class_offset);
  // The class load above doubles as the implicit null check of the receiver;
  // it must be recorded immediately after that instruction.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
7609
7610void InstructionCodeGeneratorMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
7611 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7612 return;
7613 }
7614
7615 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007616 DCHECK(!codegen_->IsLeafMethod());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007617}
7618
// Allocates the LocationSummary for an HLoadClass based on its load kind:
// register inputs/outputs, slow-path call kind, and temps for BSS entries.
void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Resolved entirely through a runtime call; both input and output use the
    // first runtime calling convention register.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      if (isR6) {
        // R6 has PC-relative addressing; no extra input register needed.
        break;
      }
      FALLTHROUGH_INTENDED;
    case HLoadClass::LoadKind::kReferrersClass:
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path.
      locations->AddTemp(Location::RequiresRegister());
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
7667
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
// Emits the class load for each supported load kind and, when needed, the
// slow path for type resolution / class initialization.
void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  // Base register for PC-relative sequences on R2; the current method register
  // for kReferrersClass; ZERO otherwise.
  Register base_or_current_method_reg;
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      break;
    case HLoadClass::LoadKind::kReferrersClass:
    case HLoadClass::LoadKind::kRuntimeCall:
      base_or_current_method_reg = locations->InAt(0).AsRegister<Register>();
      break;
    default:
      base_or_current_method_reg = ZERO;
      break;
  }

  // Boot-image classes are loaded without a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  // High half of the BSS entry patch; passed to the slow path for kBssEntry.
  CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              base_or_current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Emit a linker-patched hi/lo address pair with reordering disabled so
      // the two-instruction sequence stays intact for patching.
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      bool reordering = __ SetReorder(false);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
                                                     out,
                                                     base_or_current_method_reg,
                                                     info_low);
      __ Addiu(out, out, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The class object's address in the boot image is known at compile time.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     base_or_current_method_reg,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      // With non-Baker read barriers the output can serve as the temp;
      // otherwise use the temp requested by the locations builder.
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
      bool reordering = __ SetReorder(false);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high,
                                                     temp,
                                                     base_or_current_method_reg);
      __ SetReorder(reordering);
      // Load the GC root from the BSS entry; a null result means the type is
      // unresolved and the slow path must run.
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // The address of the JIT root table entry is patched after compilation.
      CodeGeneratorMIPS::JitPatchInfo* info = codegen_->NewJitRootClassPatch(cls->GetDexFile(),
                                                                             cls->GetTypeIndex(),
                                                                             cls->GetClass());
      bool reordering = __ SetReorder(false);
      __ Bind(&info->high_label);
      __ Lui(out, /* placeholder */ 0x1234);
      __ SetReorder(reordering);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info->low_label);
      break;
    }
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      // kRuntimeCall was handled at the top; kInvalid must never reach codegen.
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      // BSS entry was null: resolve the type on the slow path.
      __ Beqz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
7799
7800static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07007801 return Thread::ExceptionOffset<kMipsPointerSize>().Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007802}
7803
7804void LocationsBuilderMIPS::VisitLoadException(HLoadException* load) {
7805 LocationSummary* locations =
7806 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
7807 locations->SetOut(Location::RequiresRegister());
7808}
7809
7810void InstructionCodeGeneratorMIPS::VisitLoadException(HLoadException* load) {
7811 Register out = load->GetLocations()->Out().AsRegister<Register>();
7812 __ LoadFromOffset(kLoadWord, out, TR, GetExceptionTlsOffset());
7813}
7814
7815void LocationsBuilderMIPS::VisitClearException(HClearException* clear) {
7816 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
7817}
7818
7819void InstructionCodeGeneratorMIPS::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
7820 __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
7821}
7822
// Allocates the LocationSummary for an HLoadString based on its load kind:
// register inputs/outputs, slow-path call kind, and temps for BSS entries.
void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBssEntry:
      if (isR6) {
        // R6 has PC-relative addressing; no extra input register needed.
        break;
      }
      FALLTHROUGH_INTENDED;
    // We need an extra register for PC-relative dex cache accesses.
    case HLoadString::LoadKind::kRuntimeCall:
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    // The runtime call returns the string in the first calling convention register.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        // Request a temp to hold the BSS entry location for the slow path.
        locations->AddTemp(Location::RequiresRegister());
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
7864
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007865// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7866// move.
7867void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007868 HLoadString::LoadKind load_kind = load->GetLoadKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007869 LocationSummary* locations = load->GetLocations();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007870 Location out_loc = locations->Out();
7871 Register out = out_loc.AsRegister<Register>();
7872 Register base_or_current_method_reg;
7873 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
7874 switch (load_kind) {
7875 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007876 case HLoadString::LoadKind::kBootImageAddress:
7877 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007878 case HLoadString::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007879 base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
7880 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007881 default:
7882 base_or_current_method_reg = ZERO;
7883 break;
7884 }
7885
7886 switch (load_kind) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007887 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markoaad75c62016-10-03 08:46:48 +00007888 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007889 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007890 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007891 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7892 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007893 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007894 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7895 out,
7896 base_or_current_method_reg,
7897 info_low);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007898 __ Addiu(out, out, /* placeholder */ 0x5678);
7899 __ SetReorder(reordering);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007900 return; // No dex cache slow path.
7901 }
7902 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007903 uint32_t address = dchecked_integral_cast<uint32_t>(
7904 reinterpret_cast<uintptr_t>(load->GetString().Get()));
7905 DCHECK_NE(address, 0u);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007906 __ LoadLiteral(out,
7907 base_or_current_method_reg,
7908 codegen_->DeduplicateBootImageAddressLiteral(address));
7909 return; // No dex cache slow path.
7910 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00007911 case HLoadString::LoadKind::kBssEntry: {
7912 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007913 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007914 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007915 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7916 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007917 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007918 Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
7919 bool reordering = __ SetReorder(false);
7920 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7921 temp,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007922 base_or_current_method_reg);
7923 __ SetReorder(reordering);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007924 GenerateGcRootFieldLoad(load,
7925 out_loc,
7926 temp,
7927 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007928 kCompilerReadBarrierOption,
7929 &info_low->label);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007930 SlowPathCodeMIPS* slow_path =
7931 new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load, info_high);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007932 codegen_->AddSlowPath(slow_path);
7933 __ Beqz(out, slow_path->GetEntryLabel());
7934 __ Bind(slow_path->GetExitLabel());
7935 return;
7936 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08007937 case HLoadString::LoadKind::kJitTableAddress: {
7938 CodeGeneratorMIPS::JitPatchInfo* info =
7939 codegen_->NewJitRootStringPatch(load->GetDexFile(),
7940 load->GetStringIndex(),
7941 load->GetString());
7942 bool reordering = __ SetReorder(false);
7943 __ Bind(&info->high_label);
7944 __ Lui(out, /* placeholder */ 0x1234);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007945 __ SetReorder(reordering);
Alexey Frunze15958152017-02-09 19:08:30 -08007946 GenerateGcRootFieldLoad(load,
7947 out_loc,
7948 out,
7949 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007950 kCompilerReadBarrierOption,
7951 &info->low_label);
Alexey Frunze627c1a02017-01-30 19:28:14 -08007952 return;
7953 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07007954 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07007955 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007956 }
Nicolas Geoffray917d0162015-11-24 18:25:35 +00007957
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07007958 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007959 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007960 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007961 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08007962 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007963 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
7964 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007965}
7966
// Long constants are not materialized into registers here; the constant only
// needs a constant location for its output, and users emit the value inline.
void LocationsBuilderMIPS::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorMIPS::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
7975
7976void LocationsBuilderMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7977 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007978 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007979 InvokeRuntimeCallingConvention calling_convention;
7980 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7981}
7982
7983void InstructionCodeGeneratorMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7984 if (instruction->IsEnter()) {
Serban Constantinescufca16662016-07-14 09:21:59 +01007985 codegen_->InvokeRuntime(kQuickLockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007986 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7987 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01007988 codegen_->InvokeRuntime(kQuickUnlockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007989 }
7990 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7991}
7992
7993void LocationsBuilderMIPS::VisitMul(HMul* mul) {
7994 LocationSummary* locations =
7995 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
7996 switch (mul->GetResultType()) {
7997 case Primitive::kPrimInt:
7998 case Primitive::kPrimLong:
7999 locations->SetInAt(0, Location::RequiresRegister());
8000 locations->SetInAt(1, Location::RequiresRegister());
8001 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8002 break;
8003
8004 case Primitive::kPrimFloat:
8005 case Primitive::kPrimDouble:
8006 locations->SetInAt(0, Location::RequiresFpuRegister());
8007 locations->SetInAt(1, Location::RequiresFpuRegister());
8008 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
8009 break;
8010
8011 default:
8012 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
8013 }
8014}
8015
8016void InstructionCodeGeneratorMIPS::VisitMul(HMul* instruction) {
8017 Primitive::Type type = instruction->GetType();
8018 LocationSummary* locations = instruction->GetLocations();
8019 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
8020
8021 switch (type) {
8022 case Primitive::kPrimInt: {
8023 Register dst = locations->Out().AsRegister<Register>();
8024 Register lhs = locations->InAt(0).AsRegister<Register>();
8025 Register rhs = locations->InAt(1).AsRegister<Register>();
8026
8027 if (isR6) {
8028 __ MulR6(dst, lhs, rhs);
8029 } else {
8030 __ MulR2(dst, lhs, rhs);
8031 }
8032 break;
8033 }
8034 case Primitive::kPrimLong: {
8035 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
8036 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
8037 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
8038 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
8039 Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
8040 Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
8041
8042 // Extra checks to protect caused by the existance of A1_A2.
8043 // The algorithm is wrong if dst_high is either lhs_lo or rhs_lo:
8044 // (e.g. lhs=a0_a1, rhs=a2_a3 and dst=a1_a2).
8045 DCHECK_NE(dst_high, lhs_low);
8046 DCHECK_NE(dst_high, rhs_low);
8047
8048 // A_B * C_D
8049 // dst_hi: [ low(A*D) + low(B*C) + hi(B*D) ]
8050 // dst_lo: [ low(B*D) ]
8051 // Note: R2 and R6 MUL produce the low 32 bit of the multiplication result.
8052
8053 if (isR6) {
8054 __ MulR6(TMP, lhs_high, rhs_low);
8055 __ MulR6(dst_high, lhs_low, rhs_high);
8056 __ Addu(dst_high, dst_high, TMP);
8057 __ MuhuR6(TMP, lhs_low, rhs_low);
8058 __ Addu(dst_high, dst_high, TMP);
8059 __ MulR6(dst_low, lhs_low, rhs_low);
8060 } else {
8061 __ MulR2(TMP, lhs_high, rhs_low);
8062 __ MulR2(dst_high, lhs_low, rhs_high);
8063 __ Addu(dst_high, dst_high, TMP);
8064 __ MultuR2(lhs_low, rhs_low);
8065 __ Mfhi(TMP);
8066 __ Addu(dst_high, dst_high, TMP);
8067 __ Mflo(dst_low);
8068 }
8069 break;
8070 }
8071 case Primitive::kPrimFloat:
8072 case Primitive::kPrimDouble: {
8073 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
8074 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
8075 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
8076 if (type == Primitive::kPrimFloat) {
8077 __ MulS(dst, lhs, rhs);
8078 } else {
8079 __ MulD(dst, lhs, rhs);
8080 }
8081 break;
8082 }
8083 default:
8084 LOG(FATAL) << "Unexpected mul type " << type;
8085 }
8086}
8087
8088void LocationsBuilderMIPS::VisitNeg(HNeg* neg) {
8089 LocationSummary* locations =
8090 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
8091 switch (neg->GetResultType()) {
8092 case Primitive::kPrimInt:
8093 case Primitive::kPrimLong:
8094 locations->SetInAt(0, Location::RequiresRegister());
8095 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8096 break;
8097
8098 case Primitive::kPrimFloat:
8099 case Primitive::kPrimDouble:
8100 locations->SetInAt(0, Location::RequiresFpuRegister());
8101 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
8102 break;
8103
8104 default:
8105 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
8106 }
8107}
8108
8109void InstructionCodeGeneratorMIPS::VisitNeg(HNeg* instruction) {
8110 Primitive::Type type = instruction->GetType();
8111 LocationSummary* locations = instruction->GetLocations();
8112
8113 switch (type) {
8114 case Primitive::kPrimInt: {
8115 Register dst = locations->Out().AsRegister<Register>();
8116 Register src = locations->InAt(0).AsRegister<Register>();
8117 __ Subu(dst, ZERO, src);
8118 break;
8119 }
8120 case Primitive::kPrimLong: {
8121 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
8122 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
8123 Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
8124 Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
8125 __ Subu(dst_low, ZERO, src_low);
8126 __ Sltu(TMP, ZERO, dst_low);
8127 __ Subu(dst_high, ZERO, src_high);
8128 __ Subu(dst_high, dst_high, TMP);
8129 break;
8130 }
8131 case Primitive::kPrimFloat:
8132 case Primitive::kPrimDouble: {
8133 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
8134 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
8135 if (type == Primitive::kPrimFloat) {
8136 __ NegS(dst, src);
8137 } else {
8138 __ NegD(dst, src);
8139 }
8140 break;
8141 }
8142 default:
8143 LOG(FATAL) << "Unexpected neg type " << type;
8144 }
8145}
8146
8147void LocationsBuilderMIPS::VisitNewArray(HNewArray* instruction) {
8148 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008149 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008150 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008151 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00008152 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
8153 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008154}
8155
8156void InstructionCodeGeneratorMIPS::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08008157 // Note: if heap poisoning is enabled, the entry point takes care
8158 // of poisoning the reference.
Goran Jakovljevic854df412017-06-27 14:41:39 +02008159 QuickEntrypointEnum entrypoint =
8160 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
8161 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00008162 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02008163 DCHECK(!codegen_->IsLeafMethod());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008164}
8165
8166void LocationsBuilderMIPS::VisitNewInstance(HNewInstance* instruction) {
8167 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008168 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008169 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00008170 if (instruction->IsStringAlloc()) {
8171 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
8172 } else {
8173 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00008174 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008175 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
8176}
8177
8178void InstructionCodeGeneratorMIPS::VisitNewInstance(HNewInstance* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08008179 // Note: if heap poisoning is enabled, the entry point takes care
8180 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00008181 if (instruction->IsStringAlloc()) {
8182 // String is allocated through StringFactory. Call NewEmptyString entry point.
8183 Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07008184 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00008185 __ LoadFromOffset(kLoadWord, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
8186 __ LoadFromOffset(kLoadWord, T9, temp, code_offset.Int32Value());
8187 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07008188 __ NopIfNoReordering();
David Brazdil6de19382016-01-08 17:37:10 +00008189 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
8190 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01008191 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00008192 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00008193 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008194}
8195
8196void LocationsBuilderMIPS::VisitNot(HNot* instruction) {
8197 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
8198 locations->SetInAt(0, Location::RequiresRegister());
8199 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8200}
8201
8202void InstructionCodeGeneratorMIPS::VisitNot(HNot* instruction) {
8203 Primitive::Type type = instruction->GetType();
8204 LocationSummary* locations = instruction->GetLocations();
8205
8206 switch (type) {
8207 case Primitive::kPrimInt: {
8208 Register dst = locations->Out().AsRegister<Register>();
8209 Register src = locations->InAt(0).AsRegister<Register>();
8210 __ Nor(dst, src, ZERO);
8211 break;
8212 }
8213
8214 case Primitive::kPrimLong: {
8215 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
8216 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
8217 Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
8218 Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
8219 __ Nor(dst_high, src_high, ZERO);
8220 __ Nor(dst_low, src_low, ZERO);
8221 break;
8222 }
8223
8224 default:
8225 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
8226 }
8227}
8228
8229void LocationsBuilderMIPS::VisitBooleanNot(HBooleanNot* instruction) {
8230 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
8231 locations->SetInAt(0, Location::RequiresRegister());
8232 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8233}
8234
8235void InstructionCodeGeneratorMIPS::VisitBooleanNot(HBooleanNot* instruction) {
8236 LocationSummary* locations = instruction->GetLocations();
8237 __ Xori(locations->Out().AsRegister<Register>(),
8238 locations->InAt(0).AsRegister<Register>(),
8239 1);
8240}
8241
8242void LocationsBuilderMIPS::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01008243 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
8244 locations->SetInAt(0, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008245}
8246
// Implicit null check: load a word from the object into ZERO. A null object
// faults, and the recorded PC lets the signal handler map the fault back to
// this instruction. Skipped entirely if a user instruction can subsume it.
void CodeGeneratorMIPS::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  __ Lw(ZERO, obj.AsRegister<Register>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}
8256
// Explicit null check: compare the object against zero and branch to a slow
// path that throws NullPointerException.
void CodeGeneratorMIPS::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  __ Beqz(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
}
8265
8266void InstructionCodeGeneratorMIPS::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00008267 codegen_->GenerateNullCheck(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008268}
8269
8270void LocationsBuilderMIPS::VisitOr(HOr* instruction) {
8271 HandleBinaryOp(instruction);
8272}
8273
8274void InstructionCodeGeneratorMIPS::VisitOr(HOr* instruction) {
8275 HandleBinaryOp(instruction);
8276}
8277
8278void LocationsBuilderMIPS::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
8279 LOG(FATAL) << "Unreachable";
8280}
8281
8282void InstructionCodeGeneratorMIPS::VisitParallelMove(HParallelMove* instruction) {
8283 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
8284}
8285
8286void LocationsBuilderMIPS::VisitParameterValue(HParameterValue* instruction) {
8287 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
8288 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
8289 if (location.IsStackSlot()) {
8290 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
8291 } else if (location.IsDoubleStackSlot()) {
8292 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
8293 }
8294 locations->SetOut(location);
8295}
8296
void InstructionCodeGeneratorMIPS::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
8301
8302void LocationsBuilderMIPS::VisitCurrentMethod(HCurrentMethod* instruction) {
8303 LocationSummary* locations =
8304 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
8305 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
8306}
8307
8308void InstructionCodeGeneratorMIPS::VisitCurrentMethod(HCurrentMethod* instruction
8309 ATTRIBUTE_UNUSED) {
8310 // Nothing to do, the method is already at its location.
8311}
8312
8313void LocationsBuilderMIPS::VisitPhi(HPhi* instruction) {
8314 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01008315 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008316 locations->SetInAt(i, Location::Any());
8317 }
8318 locations->SetOut(Location::Any());
8319}
8320
8321void InstructionCodeGeneratorMIPS::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
8322 LOG(FATAL) << "Unreachable";
8323}
8324
8325void LocationsBuilderMIPS::VisitRem(HRem* rem) {
8326 Primitive::Type type = rem->GetResultType();
8327 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008328 (type == Primitive::kPrimInt) ? LocationSummary::kNoCall : LocationSummary::kCallOnMainOnly;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008329 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
8330
8331 switch (type) {
8332 case Primitive::kPrimInt:
8333 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze7e99e052015-11-24 19:28:01 -08008334 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008335 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8336 break;
8337
8338 case Primitive::kPrimLong: {
8339 InvokeRuntimeCallingConvention calling_convention;
8340 locations->SetInAt(0, Location::RegisterPairLocation(
8341 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
8342 locations->SetInAt(1, Location::RegisterPairLocation(
8343 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
8344 locations->SetOut(calling_convention.GetReturnLocation(type));
8345 break;
8346 }
8347
8348 case Primitive::kPrimFloat:
8349 case Primitive::kPrimDouble: {
8350 InvokeRuntimeCallingConvention calling_convention;
8351 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
8352 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
8353 locations->SetOut(calling_convention.GetReturnLocation(type));
8354 break;
8355 }
8356
8357 default:
8358 LOG(FATAL) << "Unexpected rem type " << type;
8359 }
8360}
8361
8362void InstructionCodeGeneratorMIPS::VisitRem(HRem* instruction) {
8363 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008364
8365 switch (type) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08008366 case Primitive::kPrimInt:
8367 GenerateDivRemIntegral(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008368 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008369 case Primitive::kPrimLong: {
Serban Constantinescufca16662016-07-14 09:21:59 +01008370 codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008371 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
8372 break;
8373 }
8374 case Primitive::kPrimFloat: {
Serban Constantinescufca16662016-07-14 09:21:59 +01008375 codegen_->InvokeRuntime(kQuickFmodf, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00008376 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008377 break;
8378 }
8379 case Primitive::kPrimDouble: {
Serban Constantinescufca16662016-07-14 09:21:59 +01008380 codegen_->InvokeRuntime(kQuickFmod, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00008381 CheckEntrypointTypes<kQuickFmod, double, double, double>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008382 break;
8383 }
8384 default:
8385 LOG(FATAL) << "Unexpected rem type " << type;
8386 }
8387}
8388
// A constructor fence is emitted as a store/store memory barrier; it needs
// no register locations.
void LocationsBuilderMIPS::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
8397
// Explicit memory barriers take no operands; the barrier kind comes from the
// HIR instruction itself.
void LocationsBuilderMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
8405
8406void LocationsBuilderMIPS::VisitReturn(HReturn* ret) {
8407 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
8408 Primitive::Type return_type = ret->InputAt(0)->GetType();
8409 locations->SetInAt(0, MipsReturnLocation(return_type));
8410}
8411
8412void InstructionCodeGeneratorMIPS::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
8413 codegen_->GenerateFrameExit();
8414}
8415
8416void LocationsBuilderMIPS::VisitReturnVoid(HReturnVoid* ret) {
8417 ret->SetLocations(nullptr);
8418}
8419
8420void InstructionCodeGeneratorMIPS::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
8421 codegen_->GenerateFrameExit();
8422}
8423
// Rotate and shift instructions all share the generic HandleShift logic.
void LocationsBuilderMIPS::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void LocationsBuilderMIPS::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}
8447
// Subtraction shares the generic binary-op handling.
void LocationsBuilderMIPS::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
8455
8456void LocationsBuilderMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
8457 HandleFieldGet(instruction, instruction->GetFieldInfo());
8458}
8459
8460void InstructionCodeGeneratorMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
8461 HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
8462}
8463
8464void LocationsBuilderMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
8465 HandleFieldSet(instruction, instruction->GetFieldInfo());
8466}
8467
8468void InstructionCodeGeneratorMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Goran Jakovljevice114da22016-12-26 14:21:43 +01008469 HandleFieldSet(instruction,
8470 instruction->GetFieldInfo(),
8471 instruction->GetDexPc(),
8472 instruction->GetValueCanBeNull());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008473}
8474
// Unresolved field accesses (the field could not be resolved at compile
// time) all go through runtime calls; the shared CodeGenerator helpers set
// up the calling-convention locations and emit the call.
void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}

void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}

void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}

void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}

void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8546
8547void LocationsBuilderMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01008548 LocationSummary* locations =
8549 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Lena Djokicca8c2952017-05-29 11:31:46 +02008550 // In suspend check slow path, usually there are no caller-save registers at all.
8551 // If SIMD instructions are present, however, we force spilling all live SIMD
8552 // registers in full width (since the runtime only saves/restores lower part).
8553 locations->SetCustomSlowPathCallerSaves(
8554 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008555}
8556
// Emits a suspend check unless another instruction already covers it:
// loop-header checks are folded into the back edge, and an entry-block check
// followed by a goto is folded into that goto.
void InstructionCodeGeneratorMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
8570
// Throwing is a runtime call: the exception object goes in the first runtime
// argument register and control transfers to the pDeliverException entrypoint.
void LocationsBuilderMIPS::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorMIPS::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
8582
8583void LocationsBuilderMIPS::VisitTypeConversion(HTypeConversion* conversion) {
8584 Primitive::Type input_type = conversion->GetInputType();
8585 Primitive::Type result_type = conversion->GetResultType();
8586 DCHECK_NE(input_type, result_type);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008587 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008588
8589 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
8590 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
8591 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
8592 }
8593
8594 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008595 if (!isR6 &&
8596 ((Primitive::IsFloatingPointType(result_type) && input_type == Primitive::kPrimLong) ||
8597 (result_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(input_type)))) {
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008598 call_kind = LocationSummary::kCallOnMainOnly;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008599 }
8600
8601 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
8602
8603 if (call_kind == LocationSummary::kNoCall) {
8604 if (Primitive::IsFloatingPointType(input_type)) {
8605 locations->SetInAt(0, Location::RequiresFpuRegister());
8606 } else {
8607 locations->SetInAt(0, Location::RequiresRegister());
8608 }
8609
8610 if (Primitive::IsFloatingPointType(result_type)) {
8611 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
8612 } else {
8613 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8614 }
8615 } else {
8616 InvokeRuntimeCallingConvention calling_convention;
8617
8618 if (Primitive::IsFloatingPointType(input_type)) {
8619 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
8620 } else {
8621 DCHECK_EQ(input_type, Primitive::kPrimLong);
8622 locations->SetInAt(0, Location::RegisterPairLocation(
8623 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
8624 }
8625
8626 locations->SetOut(calling_convention.GetReturnLocation(result_type));
8627 }
8628}
8629
void InstructionCodeGeneratorMIPS::VisitTypeConversion(HTypeConversion* conversion) {
  // Emits code for all primitive conversions. Dispatches on the
  // (input, result) type pair: int->long, integral narrowing, integral->FP,
  // FP->integral, and FP<->FP.
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  // Seb/Seh (byte/halfword sign extension) are only available on MIPS32R2+.
  bool has_sign_extension = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  DCHECK_NE(input_type, result_type);

  if (result_type == Primitive::kPrimLong && Primitive::IsIntegralType(input_type)) {
    // int->long: copy into the low register, sign-extend into the high one.
    Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
    Register dst_low = locations->Out().AsRegisterPairLow<Register>();
    Register src = locations->InAt(0).AsRegister<Register>();

    if (dst_low != src) {
      __ Move(dst_low, src);
    }
    __ Sra(dst_high, src, 31);
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    // Integral narrowing; a long source is narrowed from its low half.
    Register dst = locations->Out().AsRegister<Register>();
    Register src = (input_type == Primitive::kPrimLong)
        ? locations->InAt(0).AsRegisterPairLow<Register>()
        : locations->InAt(0).AsRegister<Register>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: zero-extend.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (has_sign_extension) {
          __ Seb(dst, src);
        } else {
          // Pre-R2: sign-extend with a shift-left/shift-right-arithmetic pair.
          __ Sll(dst, src, 24);
          __ Sra(dst, dst, 24);
        }
        break;
      case Primitive::kPrimShort:
        if (has_sign_extension) {
          __ Seh(dst, src);
        } else {
          // Pre-R2: sign-extend with a shift-left/shift-right-arithmetic pair.
          __ Sll(dst, src, 16);
          __ Sra(dst, dst, 16);
        }
        break;
      case Primitive::kPrimInt:
        // long->int: just take the low word.
        if (dst != src) {
          __ Move(dst, src);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    if (input_type == Primitive::kPrimLong) {
      if (isR6) {
        // cvt.s.l/cvt.d.l requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
        Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
        FRegister dst = locations->Out().AsFpuRegister<FRegister>();
        __ Mtc1(src_low, FTMP);
        __ Mthc1(src_high, FTMP);
        if (result_type == Primitive::kPrimFloat) {
          __ Cvtsl(dst, FTMP);
        } else {
          __ Cvtdl(dst, FTMP);
        }
      } else {
        // Pre-R6: long->float/double is a runtime call.
        QuickEntrypointEnum entrypoint = (result_type == Primitive::kPrimFloat) ? kQuickL2f
                                                                                : kQuickL2d;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (result_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickL2f, float, int64_t>();
        } else {
          CheckEntrypointTypes<kQuickL2d, double, int64_t>();
        }
      }
    } else {
      // int->float/double: move to an FPU register and convert in place.
      Register src = locations->InAt(0).AsRegister<Register>();
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);

    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**63 for truncating to long, -2**31 for truncating to int).
    // If the input is greater than or equal to the minimum, it proceeds to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    if (result_type == Primitive::kPrimLong) {
      if (isR6) {
        // trunc.l.s/trunc.l.d requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
        Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
        Register dst_low = locations->Out().AsRegisterPairLow<Register>();

        if (input_type == Primitive::kPrimFloat) {
          __ TruncLS(FTMP, src);
        } else {
          __ TruncLD(FTMP, src);
        }
        __ Mfc1(dst_low, FTMP);
        __ Mfhc1(dst_high, FTMP);
      } else {
        // Pre-R6: float/double->long is a runtime call.
        QuickEntrypointEnum entrypoint = (input_type == Primitive::kPrimFloat) ? kQuickF2l
                                                                               : kQuickD2l;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (input_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickF2l, int64_t, float>();
        } else {
          CheckEntrypointTypes<kQuickD2l, int64_t, double>();
        }
      }
    } else {
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      Register dst = locations->Out().AsRegister<Register>();
      MipsLabel truncate;
      MipsLabel done;

      if (!isR6) {
        // Pre-R6 NAN2008=0 path: handle NaN / too-small inputs explicitly
        // (see the big comment above).
        if (input_type == Primitive::kPrimFloat) {
          // Load the FP bit pattern of (float)INT32_MIN into FTMP.
          uint32_t min_val = bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, min_val);
          __ Mtc1(TMP, FTMP);
        } else {
          // Load the FP bit pattern of (double)INT32_MIN into FTMP; its low
          // 32 bits are zero, so only the high word needs a constant.
          uint64_t min_val = bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, High32Bits(min_val));
          __ Mtc1(ZERO, FTMP);
          __ MoveToFpuHigh(TMP, FTMP);
        }

        // If min <= src (not NaN, not too small), use the truncate instruction.
        if (input_type == Primitive::kPrimFloat) {
          __ ColeS(0, FTMP, src);
        } else {
          __ ColeD(0, FTMP, src);
        }
        __ Bc1t(0, &truncate);

        // Otherwise: 0 for NaN (src != src), INT32_MIN for too-small inputs.
        if (input_type == Primitive::kPrimFloat) {
          __ CeqS(0, src, src);
        } else {
          __ CeqD(0, src, src);
        }
        __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
        __ Movf(dst, ZERO, 0);

        __ B(&done);

        __ Bind(&truncate);
      }

      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);

      if (!isR6) {
        __ Bind(&done);
      }
    }
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    // float<->double: single conversion instruction.
    FRegister dst = locations->Out().AsFpuRegister<FRegister>();
    FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
8826
// Unsigned shift right and bitwise xor: both the locations and the code
// generation are implemented by the shared HandleShift()/HandleBinaryOp()
// helpers.
void LocationsBuilderMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
8842
// HBoundType must never survive to code generation; hitting either visitor is
// a compiler bug.
void LocationsBuilderMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
8852
8853void LocationsBuilderMIPS::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008854 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008855}
8856
8857void InstructionCodeGeneratorMIPS::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008858 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008859}
8860
8861void LocationsBuilderMIPS::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008862 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008863}
8864
8865void InstructionCodeGeneratorMIPS::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008866 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008867}
8868
8869void LocationsBuilderMIPS::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008870 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008871}
8872
8873void InstructionCodeGeneratorMIPS::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008874 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008875}
8876
8877void LocationsBuilderMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008878 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008879}
8880
8881void InstructionCodeGeneratorMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008882 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008883}
8884
8885void LocationsBuilderMIPS::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008886 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008887}
8888
8889void InstructionCodeGeneratorMIPS::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008890 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008891}
8892
8893void LocationsBuilderMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008894 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008895}
8896
8897void InstructionCodeGeneratorMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008898 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008899}
8900
8901void LocationsBuilderMIPS::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008902 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008903}
8904
8905void InstructionCodeGeneratorMIPS::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008906 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008907}
8908
8909void LocationsBuilderMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008910 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008911}
8912
8913void InstructionCodeGeneratorMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008914 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008915}
8916
8917void LocationsBuilderMIPS::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008918 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008919}
8920
8921void InstructionCodeGeneratorMIPS::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008922 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008923}
8924
8925void LocationsBuilderMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008926 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008927}
8928
8929void InstructionCodeGeneratorMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008930 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008931}
8932
void LocationsBuilderMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  // The switch value only needs to be in some core register; no runtime call.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
8938
// Emits a packed switch as a chain of compare-and-branch pairs on the biased
// value (value - lower_bound), which is decremented in place as case values
// are peeled off two at a time. Used when a jump table is not worthwhile
// (see VisitPackedSwitch()).
void InstructionCodeGeneratorMIPS::GenPackedSwitchWithCompares(Register value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  Register temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltz(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqz(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ B(codegen_->GetLabelOf(default_block));
  }
}
8974
// Emits a packed switch through a jump table: bounds-check the biased value,
// load the per-case offset from the table, turn it into an absolute address
// and jump. `constant_area` is the base register used to locate the table
// (ZERO on R6, where the table is addressed PC-relatively; see
// VisitPackedSwitch()/VisitMipsPackedSwitch()).
void InstructionCodeGeneratorMIPS::GenTableBasedPackedSwitch(Register value_reg,
                                                             Register constant_area,
                                                             int32_t lower_bound,
                                                             uint32_t num_entries,
                                                             HBasicBlock* switch_block,
                                                             HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<MipsLabel*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  if (IsInt<16>(static_cast<int32_t>(num_entries))) {
    // Small entry count: compare against an immediate.
    __ Sltiu(AT, TMP, num_entries);
    __ Beqz(AT, codegen_->GetLabelOf(default_block));
  } else {
    // Entry count does not fit a 16-bit immediate: materialize it first.
    __ LoadConst32(AT, num_entries);
    __ Bgeu(TMP, AT, codegen_->GetLabelOf(default_block));
  }

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, constant_area, table->GetLabel());
  __ ShiftAndAdd(TMP, TMP, AT, 2, TMP);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Addu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ NopIfNoReordering();
}
9011
9012void InstructionCodeGeneratorMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
9013 int32_t lower_bound = switch_instr->GetStartValue();
9014 uint32_t num_entries = switch_instr->GetNumEntries();
9015 LocationSummary* locations = switch_instr->GetLocations();
9016 Register value_reg = locations->InAt(0).AsRegister<Register>();
9017 HBasicBlock* switch_block = switch_instr->GetBlock();
9018 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9019
9020 if (codegen_->GetInstructionSetFeatures().IsR6() &&
9021 num_entries > kPackedSwitchJumpTableThreshold) {
9022 // R6 uses PC-relative addressing to access the jump table.
9023 // R2, OTOH, requires an HMipsComputeBaseMethodAddress input to access
9024 // the jump table and it is implemented by changing HPackedSwitch to
9025 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress.
9026 // See VisitMipsPackedSwitch() for the table-based implementation on R2.
9027 GenTableBasedPackedSwitch(value_reg,
9028 ZERO,
9029 lower_bound,
9030 num_entries,
9031 switch_block,
9032 default_block);
9033 } else {
9034 GenPackedSwitchWithCompares(value_reg,
9035 lower_bound,
9036 num_entries,
9037 switch_block,
9038 default_block);
9039 }
9040}
9041
void LocationsBuilderMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
  // Two core-register inputs: the switch value and the constant-area base.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // Constant area pointer (HMipsComputeBaseMethodAddress).
  locations->SetInAt(1, Location::RequiresRegister());
}
9049
9050void InstructionCodeGeneratorMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
9051 int32_t lower_bound = switch_instr->GetStartValue();
9052 uint32_t num_entries = switch_instr->GetNumEntries();
9053 LocationSummary* locations = switch_instr->GetLocations();
9054 Register value_reg = locations->InAt(0).AsRegister<Register>();
9055 Register constant_area = locations->InAt(1).AsRegister<Register>();
9056 HBasicBlock* switch_block = switch_instr->GetBlock();
9057 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9058
9059 // This is an R2-only path. HPackedSwitch has been changed to
9060 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress
9061 // required to address the jump table relative to PC.
9062 GenTableBasedPackedSwitch(value_reg,
9063 constant_area,
9064 lower_bound,
9065 num_entries,
9066 switch_block,
9067 default_block);
9068}
9069
void LocationsBuilderMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  // The obtained PC value goes into any core register; no runtime call.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
9076
// Materializes the current PC into a register on R2 (R6 has real PC-relative
// addressing and never creates this instruction, hence the CHECK).
void InstructionCodeGeneratorMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  LocationSummary* locations = insn->GetLocations();
  Register reg = locations->Out().AsRegister<Register>();

  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());

  // Generate a dummy PC-relative call to obtain PC.
  __ Nal();
  // Grab the return address off RA.
  __ Move(reg, RA);

  // Remember this offset (the obtained PC value) for later use with constant area.
  __ BindPcRelBaseLabel();
}
9092
// Unresolved invokes are routed through a runtime trampoline.
void LocationsBuilderMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
9099
void InstructionCodeGeneratorMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Shared code-generator helper emits the trampoline call; see the locations
  // builder for the calling-convention tweak (arg0 carries the method_idx).
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
9103
void LocationsBuilderMIPS::VisitClassTableGet(HClassTableGet* instruction) {
  // Input 0: the class object; output: the method pointer loaded from one of
  // its tables (vtable or IMT). Core registers, no runtime call.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
9110
Roland Levillain2aba7cd2016-02-03 12:27:20 +00009111void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instruction) {
9112 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00009113 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01009114 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Roland Levillain2aba7cd2016-02-03 12:27:20 +00009115 instruction->GetIndex(), kMipsPointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01009116 __ LoadFromOffset(kLoadWord,
9117 locations->Out().AsRegister<Register>(),
9118 locations->InAt(0).AsRegister<Register>(),
9119 method_offset);
Roland Levillain2aba7cd2016-02-03 12:27:20 +00009120 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01009121 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00009122 instruction->GetIndex(), kMipsPointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00009123 __ LoadFromOffset(kLoadWord,
9124 locations->Out().AsRegister<Register>(),
9125 locations->InAt(0).AsRegister<Register>(),
9126 mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01009127 __ LoadFromOffset(kLoadWord,
9128 locations->Out().AsRegister<Register>(),
9129 locations->Out().AsRegister<Register>(),
9130 method_offset);
Roland Levillain2aba7cd2016-02-03 12:27:20 +00009131 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00009132}
9133
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02009134#undef __
9135#undef QUICK_ENTRY_POINT
9136
9137} // namespace mips
9138} // namespace art