blob: ffde45e95e070403f6e2523827f6c517a207dd0a [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips64/asm_support_mips64.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070020#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070022#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080023#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010027#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070028#include "heap_poisoning.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070029#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070030#include "intrinsics_mips64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010031#include "linker/linker_patch.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070032#include "mirror/array-inl.h"
33#include "mirror/class-inl.h"
34#include "offsets.h"
Vladimir Marko174b2e22017-10-12 13:34:49 +010035#include "stack_map_stream.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070036#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070037#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070038#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070039#include "utils/stack_checks.h"
40
41namespace art {
42namespace mips64 {
43
// Stack offset at which the current ArtMethod* is stored in the frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry (see GetMethodLocation() below).
static constexpr GpuRegister kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
51
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010052Location Mips64ReturnLocation(DataType::Type return_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070053 switch (return_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010054 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +010055 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010056 case DataType::Type::kInt8:
57 case DataType::Type::kUint16:
58 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -080059 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010060 case DataType::Type::kInt32:
61 case DataType::Type::kReference:
Aart Bik66c158e2018-01-31 12:55:04 -080062 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010063 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070064 return Location::RegisterLocation(V0);
65
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010066 case DataType::Type::kFloat32:
67 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070068 return Location::FpuRegisterLocation(F0);
69
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010070 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -070071 return Location();
72 }
73 UNREACHABLE();
74}
75
// Returns the location of a value of `type` returned by a dex (managed) call;
// delegates to the shared MIPS64 mapping above.
Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(DataType::Type type) const {
  return Mips64ReturnLocation(type);
}
79
// The callee receives its own ArtMethod* in A0 (kMethodRegisterArgument).
Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
83
// Computes the location of the next argument of `type` under the dex calling
// convention. Note the cross-increments below: assigning an FP argument
// register also consumes a GP index and vice versa, i.e. the GP and FP
// argument sequences advance in lock-step on MIPS64.
Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(DataType::Type type) {
  Location next_location;
  // Void is not a valid parameter type.
  if (type == DataType::Type::kVoid) {
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

  if (DataType::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    // FP argument register available: use it and also burn a GP slot.
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    gp_index_++;
  } else if (!DataType::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    // GP argument register available: use it and also burn an FP slot.
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    float_index_++;
  } else {
    // Out of argument registers: the argument goes on the stack. 64-bit
    // values occupy a double stack slot.
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += DataType::Is64BitType(type) ? 2 : 1;

  return next_location;
}
110
// Runtime (quick entrypoint) calls return values in the same registers as
// managed calls; reuse the shared mapping.
Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type) {
  return Mips64ReturnLocation(type);
}
114
// Shorthand used by the slow paths below to emit MIPS64 assembly.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
// Byte offset of the quick entrypoint `x` within the Thread object (MIPS64 pointer size).
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700118
// Slow path for HBoundsCheck: moves the index and length into the runtime
// calling convention registers and calls the kQuickThrowArrayBounds (or
// kQuickThrowStringBounds for String.charAt) entrypoint.
class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    // String.charAt bounds failures use a dedicated entrypoint.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  // The entrypoint throws; control does not come back to this slow path.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};
155
// Slow path for HDivZeroCheck: calls the kQuickThrowDivZero entrypoint.
class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  // The entrypoint throws; control does not come back to this slow path.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
};
175
// Slow path shared by HLoadClass and HClinitCheck: resolves the type via
// kQuickInitializeType, or resolves and initializes it via
// kQuickInitializeStaticStorage when `do_clinit` is set, then moves the
// resulting class into the instruction's output location (if any).
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCodeMIPS64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the type index to the runtime in the first argument register.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
236
// Slow path for HLoadString with the kBssEntry load kind: resolves the string
// via kQuickResolveString and moves the result into the output location.
class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the string index to the runtime in the first argument register.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    DataType::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};
274
// Slow path for HNullCheck: calls the kQuickThrowNullPointer entrypoint.
class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  // The entrypoint throws; control does not come back to this slow path.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
};
300
// Slow path for HSuspendCheck: calls kQuickTestSuspend, then branches either
// back to the return label (loop-header checks) or to `successor_`.
class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live vector registers for SIMD.
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live vector registers for SIMD.
    if (successor_ == nullptr) {
      __ Bc(GetReturnLabel());
    } else {
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  // Only valid when there is no explicit successor block.
  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};
341
// Slow path shared by HInstanceOf and HCheckCast: moves the object and the
// class into the runtime argument registers and calls
// kQuickInstanceofNonTrivial (instance-of, result moved to the output) or
// kQuickCheckInstanceOf (check-cast, which throws on failure).
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    // For a fatal check-cast, registers only need saving if the exception can
    // be caught in this method.
    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether the runtime call always throws (no fall-through back to fast path).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
396
// Slow path for HDeoptimize: passes the deoptimization kind to the
// kQuickDeoptimize entrypoint, which transfers execution to the interpreter.
class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    // Pass the deoptimization kind as the first (and only) argument.
    __ LoadConst32(calling_convention.GetRegisterAt(0),
                   static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};
419
// Slow path for HArraySet of an object element: moves the array, index and
// value into the runtime argument registers and calls kQuickAputObject,
// which performs the store with the required type check.
class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // The three inputs may overlap the argument registers, so resolve the
    // moves as a single parallel move.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
};
460
461// Slow path marking an object reference `ref` during a read
462// barrier. The field `obj.field` in the object `obj` holding this
463// reference does not get updated by this slow path after marking (see
464// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
465//
466// This means that after the execution of this slow path, `ref` will
467// always be up-to-date, but `obj.field` may not; i.e., after the
468// flip, `ref` will be a to-space reference, but `obj.field` will
469// probably still be a from-space reference (unless it gets updated by
470// another thread, or if another thread installed another object
471// reference (different from `ref`) in `obj.field`).
472//
473// If `entrypoint` is a valid location it is assumed to already be
474// holding the entrypoint. The case where the entrypoint is passed in
475// is for the GcRoot read barrier.
476class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
477 public:
478 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
479 Location ref,
480 Location entrypoint = Location::NoLocation())
481 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
482 DCHECK(kEmitCompilerReadBarrier);
483 }
484
485 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
486
487 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
488 LocationSummary* locations = instruction_->GetLocations();
489 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
490 DCHECK(locations->CanCall());
491 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
492 DCHECK(instruction_->IsInstanceFieldGet() ||
493 instruction_->IsStaticFieldGet() ||
494 instruction_->IsArrayGet() ||
495 instruction_->IsArraySet() ||
496 instruction_->IsLoadClass() ||
497 instruction_->IsLoadString() ||
498 instruction_->IsInstanceOf() ||
499 instruction_->IsCheckCast() ||
500 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
501 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
502 << "Unexpected instruction in read barrier marking slow path: "
503 << instruction_->DebugName();
504
505 __ Bind(GetEntryLabel());
506 // No need to save live registers; it's taken care of by the
507 // entrypoint. Also, there is no need to update the stack mask,
508 // as this runtime call will not trigger a garbage collection.
509 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
510 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
511 (S2 <= ref_reg && ref_reg <= S7) ||
512 (ref_reg == S8)) << ref_reg;
513 // "Compact" slow path, saving two moves.
514 //
515 // Instead of using the standard runtime calling convention (input
516 // and output in A0 and V0 respectively):
517 //
518 // A0 <- ref
519 // V0 <- ReadBarrierMark(A0)
520 // ref <- V0
521 //
522 // we just use rX (the register containing `ref`) as input and output
523 // of a dedicated entrypoint:
524 //
525 // rX <- ReadBarrierMarkRegX(rX)
526 //
527 if (entrypoint_.IsValid()) {
528 mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
529 DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
530 __ Jalr(entrypoint_.AsRegister<GpuRegister>());
531 __ Nop();
532 } else {
533 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100534 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800535 // This runtime call does not require a stack map.
536 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
537 instruction_,
538 this);
539 }
540 __ Bc(GetExitLabel());
541 }
542
543 private:
544 // The location (register) of the marked object reference.
545 const Location ref_;
546
547 // The location of the entrypoint if already loaded.
548 const Location entrypoint_;
549
550 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
551};
552
553// Slow path marking an object reference `ref` during a read barrier,
554// and if needed, atomically updating the field `obj.field` in the
555// object `obj` holding this reference after marking (contrary to
556// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
557// `obj.field`).
558//
559// This means that after the execution of this slow path, both `ref`
560// and `obj.field` will be up-to-date; i.e., after the flip, both will
561// hold the same to-space reference (unless another thread installed
562// another object reference (different from `ref`) in `obj.field`).
563class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
564 public:
565 ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
566 Location ref,
567 GpuRegister obj,
568 Location field_offset,
569 GpuRegister temp1)
570 : SlowPathCodeMIPS64(instruction),
571 ref_(ref),
572 obj_(obj),
573 field_offset_(field_offset),
574 temp1_(temp1) {
575 DCHECK(kEmitCompilerReadBarrier);
576 }
577
578 const char* GetDescription() const OVERRIDE {
579 return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
580 }
581
582 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
583 LocationSummary* locations = instruction_->GetLocations();
584 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
585 DCHECK(locations->CanCall());
586 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
587 // This slow path is only used by the UnsafeCASObject intrinsic.
588 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
589 << "Unexpected instruction in read barrier marking and field updating slow path: "
590 << instruction_->DebugName();
591 DCHECK(instruction_->GetLocations()->Intrinsified());
592 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
593 DCHECK(field_offset_.IsRegister()) << field_offset_;
594
595 __ Bind(GetEntryLabel());
596
597 // Save the old reference.
598 // Note that we cannot use AT or TMP to save the old reference, as those
599 // are used by the code that follows, but we need the old reference after
600 // the call to the ReadBarrierMarkRegX entry point.
601 DCHECK_NE(temp1_, AT);
602 DCHECK_NE(temp1_, TMP);
603 __ Move(temp1_, ref_reg);
604
605 // No need to save live registers; it's taken care of by the
606 // entrypoint. Also, there is no need to update the stack mask,
607 // as this runtime call will not trigger a garbage collection.
608 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
609 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
610 (S2 <= ref_reg && ref_reg <= S7) ||
611 (ref_reg == S8)) << ref_reg;
612 // "Compact" slow path, saving two moves.
613 //
614 // Instead of using the standard runtime calling convention (input
615 // and output in A0 and V0 respectively):
616 //
617 // A0 <- ref
618 // V0 <- ReadBarrierMark(A0)
619 // ref <- V0
620 //
621 // we just use rX (the register containing `ref`) as input and output
622 // of a dedicated entrypoint:
623 //
624 // rX <- ReadBarrierMarkRegX(rX)
625 //
626 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100627 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800628 // This runtime call does not require a stack map.
629 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
630 instruction_,
631 this);
632
633 // If the new reference is different from the old reference,
634 // update the field in the holder (`*(obj_ + field_offset_)`).
635 //
636 // Note that this field could also hold a different object, if
637 // another thread had concurrently changed it. In that case, the
638 // the compare-and-set (CAS) loop below would abort, leaving the
639 // field as-is.
640 Mips64Label done;
641 __ Beqc(temp1_, ref_reg, &done);
642
643 // Update the the holder's field atomically. This may fail if
644 // mutator updates before us, but it's OK. This is achieved
645 // using a strong compare-and-set (CAS) operation with relaxed
646 // memory synchronization ordering, where the expected value is
647 // the old reference and the desired value is the new reference.
648
649 // Convenience aliases.
650 GpuRegister base = obj_;
651 GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
652 GpuRegister expected = temp1_;
653 GpuRegister value = ref_reg;
654 GpuRegister tmp_ptr = TMP; // Pointer to actual memory.
655 GpuRegister tmp = AT; // Value in memory.
656
657 __ Daddu(tmp_ptr, base, offset);
658
659 if (kPoisonHeapReferences) {
660 __ PoisonHeapReference(expected);
661 // Do not poison `value` if it is the same register as
662 // `expected`, which has just been poisoned.
663 if (value != expected) {
664 __ PoisonHeapReference(value);
665 }
666 }
667
668 // do {
669 // tmp = [r_ptr] - expected;
670 // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
671
672 Mips64Label loop_head, exit_loop;
673 __ Bind(&loop_head);
674 __ Ll(tmp, tmp_ptr);
675 // The LL instruction sign-extends the 32-bit value, but
676 // 32-bit references must be zero-extended. Zero-extend `tmp`.
677 __ Dext(tmp, tmp, 0, 32);
678 __ Bnec(tmp, expected, &exit_loop);
679 __ Move(tmp, value);
680 __ Sc(tmp, tmp_ptr);
681 __ Beqzc(tmp, &loop_head);
682 __ Bind(&exit_loop);
683
684 if (kPoisonHeapReferences) {
685 __ UnpoisonHeapReference(expected);
686 // Do not unpoison `value` if it is the same register as
687 // `expected`, which has just been unpoisoned.
688 if (value != expected) {
689 __ UnpoisonHeapReference(value);
690 }
691 }
692
693 __ Bind(&done);
694 __ Bc(GetExitLabel());
695 }
696
697 private:
698 // The location (register) of the marked object reference.
699 const Location ref_;
700 // The register containing the object holding the marked object reference field.
701 const GpuRegister obj_;
702 // The location of the offset of the marked reference field within `obj_`.
703 Location field_offset_;
704
705 const GpuRegister temp1_;
706
707 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
708};
709
710// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `out` receives the marked reference; `ref` is the reference that was
  // loaded; `obj` holds it; the source address is `obj + offset` plus, when
  // `index` is valid, a (possibly scaled) index component.
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // No index: pass the constant offset as the third argument instead.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    // Move the runtime's result (marked reference) into the output location.
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`,
  // used above to preserve a callee-save index register.
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    // NOTE(review): casts use static_cast<int> into size_t locals; arguably
    // should be static_cast<size_t>, though register values are non-negative.
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location receiving the marked reference.
  const Location out_;
  // The location of the reference that was loaded.
  const Location ref_;
  // The location of the object holding the reference.
  const Location obj_;
  // Static part of the source address (`obj_ + offset_`).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
895
896// Slow path generating a read barrier for a GC root.
897class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
898 public:
899 ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
900 : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
901 DCHECK(kEmitCompilerReadBarrier);
902 }
903
904 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
905 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100906 DataType::Type type = DataType::Type::kReference;
Alexey Frunze15958152017-02-09 19:08:30 -0800907 GpuRegister reg_out = out_.AsRegister<GpuRegister>();
908 DCHECK(locations->CanCall());
909 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
910 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
911 << "Unexpected instruction in read barrier for GC root slow path: "
912 << instruction_->DebugName();
913
914 __ Bind(GetEntryLabel());
915 SaveLiveRegisters(codegen, locations);
916
917 InvokeRuntimeCallingConvention calling_convention;
918 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
919 mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
920 root_,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100921 DataType::Type::kReference);
Alexey Frunze15958152017-02-09 19:08:30 -0800922 mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
923 instruction_,
924 instruction_->GetDexPc(),
925 this);
926 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
927 mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
928
929 RestoreLiveRegisters(codegen, locations);
930 __ Bc(GetExitLabel());
931 }
932
933 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }
934
935 private:
936 const Location out_;
937 const Location root_;
938
939 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
940};
941
Alexey Frunze4dda3372015-06-01 18:31:49 -0700942CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
Serban Constantinescuecc43662015-08-13 13:33:12 +0100943 const CompilerOptions& compiler_options,
944 OptimizingCompilerStats* stats)
Alexey Frunze4dda3372015-06-01 18:31:49 -0700945 : CodeGenerator(graph,
946 kNumberOfGpuRegisters,
947 kNumberOfFpuRegisters,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000948 /* number_of_register_pairs */ 0,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700949 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
950 arraysize(kCoreCalleeSaves)),
951 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
952 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +0100953 compiler_options,
954 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +0100955 block_labels_(nullptr),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700956 location_builder_(graph, this),
957 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100958 move_resolver_(graph->GetAllocator(), this),
Vladimir Markoa0431112018-06-25 09:32:54 +0100959 assembler_(graph->GetAllocator(),
960 compiler_options.GetInstructionSetFeatures()->AsMips64InstructionSetFeatures()),
Alexey Frunzef63f5692016-12-13 17:43:11 -0800961 uint32_literals_(std::less<uint32_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100962 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Alexey Frunze19f6c692016-11-30 19:19:55 -0800963 uint64_literals_(std::less<uint64_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100964 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +0000965 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100966 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +0000967 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100968 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +0000969 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100970 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko6fd16062018-06-26 11:02:04 +0100971 boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Alexey Frunze627c1a02017-01-30 19:28:14 -0800972 jit_string_patches_(StringReferenceValueComparator(),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100973 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Alexey Frunze627c1a02017-01-30 19:28:14 -0800974 jit_class_patches_(TypeReferenceValueComparator(),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100975 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700976 // Save RA (containing the return address) to mimic Quick.
977 AddAllocatedRegister(Location::RegisterLocation(RA));
978}
979
980#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100981// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
982#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700983#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700984
985void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700986 // Ensure that we fix up branches.
987 __ FinalizeCode();
988
989 // Adjust native pc offsets in stack maps.
Vladimir Marko174b2e22017-10-12 13:34:49 +0100990 StackMapStream* stack_map_stream = GetStackMapStream();
991 for (size_t i = 0, num = stack_map_stream->GetNumberOfStackMaps(); i != num; ++i) {
David Srbeckyd02b23f2018-05-29 23:27:22 +0100992 uint32_t old_position = stack_map_stream->GetStackMapNativePcOffset(i);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700993 uint32_t new_position = __ GetAdjustedPosition(old_position);
994 DCHECK_GE(new_position, old_position);
Vladimir Marko174b2e22017-10-12 13:34:49 +0100995 stack_map_stream->SetStackMapNativePcOffset(i, new_position);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700996 }
997
998 // Adjust pc offsets for the disassembly information.
999 if (disasm_info_ != nullptr) {
1000 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1001 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1002 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1003 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1004 it.second.start = __ GetAdjustedPosition(it.second.start);
1005 it.second.end = __ GetAdjustedPosition(it.second.end);
1006 }
1007 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1008 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1009 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1010 }
1011 }
1012
Alexey Frunze4dda3372015-06-01 18:31:49 -07001013 CodeGenerator::Finalize(allocator);
1014}
1015
// Parallel moves are emitted through the owning code generator's assembler.
Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}
1019
1020void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001021 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001022 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1023}
1024
1025void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001026 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001027 codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
1028}
1029
1030void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
1031 // Pop reg
1032 __ Ld(GpuRegister(reg), SP, 0);
Lazar Trsicd9672662015-09-03 17:33:01 +02001033 __ DecreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001034}
1035
1036void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
1037 // Push reg
Lazar Trsicd9672662015-09-03 17:33:01 +02001038 __ IncreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001039 __ Sd(GpuRegister(reg), SP, 0);
1040}
1041
1042void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
1043 LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
1044 StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
1045 // Allocate a scratch register other than TMP, if available.
1046 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1047 // automatically unspilled when the scratch scope object is destroyed).
1048 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1049 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
Lazar Trsicd9672662015-09-03 17:33:01 +02001050 int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001051 __ LoadFromOffset(load_type,
1052 GpuRegister(ensure_scratch.GetRegister()),
1053 SP,
1054 index1 + stack_offset);
1055 __ LoadFromOffset(load_type,
1056 TMP,
1057 SP,
1058 index2 + stack_offset);
1059 __ StoreToOffset(store_type,
1060 GpuRegister(ensure_scratch.GetRegister()),
1061 SP,
1062 index2 + stack_offset);
1063 __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
1064}
1065
Goran Jakovljevice7de5ec2017-12-14 10:25:20 +01001066void ParallelMoveResolverMIPS64::ExchangeQuadSlots(int index1, int index2) {
1067 __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, index1);
1068 __ LoadFpuFromOffset(kLoadQuadword, FTMP2, SP, index2);
1069 __ StoreFpuToOffset(kStoreQuadword, FTMP, SP, index2);
1070 __ StoreFpuToOffset(kStoreQuadword, FTMP2, SP, index1);
1071}
1072
Alexey Frunze4dda3372015-06-01 18:31:49 -07001073static dwarf::Reg DWARFReg(GpuRegister reg) {
1074 return dwarf::Reg::Mips64Core(static_cast<int>(reg));
1075}
1076
David Srbeckyba702002016-02-01 18:15:29 +00001077static dwarf::Reg DWARFReg(FpuRegister reg) {
1078 return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
1079}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001080
// Emits the method prologue: optional hotness counting, stack-overflow
// probe, frame allocation, callee-save spills, current-method store and
// should-deoptimize flag initialization.
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    // Increment the method's 16-bit hotness counter (load/add/store halfword).
    __ Lhu(TMP, kMethodRegisterArgument, ArtMethod::HotnessCountOffset().Int32Value());
    __ Addiu(TMP, TMP, 1);
    __ Sh(TMP, kMethodRegisterArgument, ArtMethod::HotnessCountOffset().Int32Value());
  }

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Implicit stack-overflow check: load (into ZERO, i.e. discard) from
    // below SP at the edge of the reserved region; overflow faults here.
    __ LoadFromOffset(
        kLoadWord,
        ZERO,
        SP,
        -static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kMips64)));
    // Record pc info so the fault can be attributed to this method entry.
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(InstructionSet::kMips64)) {
    LOG(FATAL) << "Stack frame larger than "
        << GetStackOverflowReservedBytes(InstructionSet::kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  // `ofs` walks down from the top of the frame; each allocated callee-save
  // (core first, then FPU) takes one doubleword slot.
  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      // Emit CFI so unwinders can find the saved register.
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1147
// Emits the method epilogue: restores callee-saved registers, tears down
// the frame and returns, keeping the CFI state consistent for code emitted
// after this point.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    // `ofs` mirrors the prologue: walk down from the top of the frame in
    // the same order the registers were spilled.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        // Tell unwinders the register now holds its caller value again.
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return to the caller (jump to RA with zero offset).
  __ Jic(RA, 0);

  // Reset CFI back to the in-frame state for any code emitted after the
  // return (other blocks of the method).
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1182
1183void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
1184 __ Bind(GetLabelOf(block));
1185}
1186
1187void CodeGeneratorMIPS64::MoveLocation(Location destination,
1188 Location source,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001189 DataType::Type dst_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001190 if (source.Equals(destination)) {
1191 return;
1192 }
1193
1194 // A valid move can always be inferred from the destination and source
1195 // locations. When moving from and to a register, the argument type can be
1196 // used to generate 32bit instead of 64bit moves.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001197 bool unspecified_type = (dst_type == DataType::Type::kVoid);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001198 DCHECK_EQ(unspecified_type, false);
1199
1200 if (destination.IsRegister() || destination.IsFpuRegister()) {
1201 if (unspecified_type) {
1202 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1203 if (source.IsStackSlot() ||
1204 (src_cst != nullptr && (src_cst->IsIntConstant()
1205 || src_cst->IsFloatConstant()
1206 || src_cst->IsNullConstant()))) {
1207 // For stack slots and 32bit constants, a 64bit type is appropriate.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001208 dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001209 } else {
1210 // If the source is a double stack slot or a 64bit constant, a 64bit
1211 // type is appropriate. Else the source is a register, and since the
1212 // type has not been specified, we chose a 64bit type to force a 64bit
1213 // move.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001214 dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001215 }
1216 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001217 DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
1218 (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001219 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1220 // Move to GPR/FPR from stack
1221 LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001222 if (DataType::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001223 __ LoadFpuFromOffset(load_type,
1224 destination.AsFpuRegister<FpuRegister>(),
1225 SP,
1226 source.GetStackIndex());
1227 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001228 // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
Alexey Frunze4dda3372015-06-01 18:31:49 -07001229 __ LoadFromOffset(load_type,
1230 destination.AsRegister<GpuRegister>(),
1231 SP,
1232 source.GetStackIndex());
1233 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001234 } else if (source.IsSIMDStackSlot()) {
1235 __ LoadFpuFromOffset(kLoadQuadword,
1236 destination.AsFpuRegister<FpuRegister>(),
1237 SP,
1238 source.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001239 } else if (source.IsConstant()) {
1240 // Move to GPR/FPR from constant
1241 GpuRegister gpr = AT;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001242 if (!DataType::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001243 gpr = destination.AsRegister<GpuRegister>();
1244 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001245 if (dst_type == DataType::Type::kInt32 || dst_type == DataType::Type::kFloat32) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001246 int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001247 if (DataType::IsFloatingPointType(dst_type) && value == 0) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001248 gpr = ZERO;
1249 } else {
1250 __ LoadConst32(gpr, value);
1251 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001252 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001253 int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001254 if (DataType::IsFloatingPointType(dst_type) && value == 0) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001255 gpr = ZERO;
1256 } else {
1257 __ LoadConst64(gpr, value);
1258 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001259 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001260 if (dst_type == DataType::Type::kFloat32) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001261 __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001262 } else if (dst_type == DataType::Type::kFloat64) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001263 __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
1264 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001265 } else if (source.IsRegister()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001266 if (destination.IsRegister()) {
1267 // Move to GPR from GPR
1268 __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
1269 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001270 DCHECK(destination.IsFpuRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001271 if (DataType::Is64BitType(dst_type)) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001272 __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1273 } else {
1274 __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1275 }
1276 }
1277 } else if (source.IsFpuRegister()) {
1278 if (destination.IsFpuRegister()) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001279 if (GetGraph()->HasSIMD()) {
1280 __ MoveV(VectorRegisterFrom(destination),
1281 VectorRegisterFrom(source));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001282 } else {
Lena Djokicca8c2952017-05-29 11:31:46 +02001283 // Move to FPR from FPR
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001284 if (dst_type == DataType::Type::kFloat32) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001285 __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1286 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001287 DCHECK_EQ(dst_type, DataType::Type::kFloat64);
Lena Djokicca8c2952017-05-29 11:31:46 +02001288 __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1289 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001290 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001291 } else {
1292 DCHECK(destination.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001293 if (DataType::Is64BitType(dst_type)) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001294 __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1295 } else {
1296 __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1297 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001298 }
1299 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001300 } else if (destination.IsSIMDStackSlot()) {
1301 if (source.IsFpuRegister()) {
1302 __ StoreFpuToOffset(kStoreQuadword,
1303 source.AsFpuRegister<FpuRegister>(),
1304 SP,
1305 destination.GetStackIndex());
1306 } else {
1307 DCHECK(source.IsSIMDStackSlot());
1308 __ LoadFpuFromOffset(kLoadQuadword,
1309 FTMP,
1310 SP,
1311 source.GetStackIndex());
1312 __ StoreFpuToOffset(kStoreQuadword,
1313 FTMP,
1314 SP,
1315 destination.GetStackIndex());
1316 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001317 } else { // The destination is not a register. It must be a stack slot.
1318 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1319 if (source.IsRegister() || source.IsFpuRegister()) {
1320 if (unspecified_type) {
1321 if (source.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001322 dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001323 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001324 dst_type =
1325 destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001326 }
1327 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001328 DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
1329 (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001330 // Move to stack from GPR/FPR
1331 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
1332 if (source.IsRegister()) {
1333 __ StoreToOffset(store_type,
1334 source.AsRegister<GpuRegister>(),
1335 SP,
1336 destination.GetStackIndex());
1337 } else {
1338 __ StoreFpuToOffset(store_type,
1339 source.AsFpuRegister<FpuRegister>(),
1340 SP,
1341 destination.GetStackIndex());
1342 }
1343 } else if (source.IsConstant()) {
1344 // Move to stack from constant
1345 HConstant* src_cst = source.GetConstant();
1346 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001347 GpuRegister gpr = ZERO;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001348 if (destination.IsStackSlot()) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001349 int32_t value = GetInt32ValueOf(src_cst->AsConstant());
1350 if (value != 0) {
1351 gpr = TMP;
1352 __ LoadConst32(gpr, value);
1353 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001354 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001355 DCHECK(destination.IsDoubleStackSlot());
1356 int64_t value = GetInt64ValueOf(src_cst->AsConstant());
1357 if (value != 0) {
1358 gpr = TMP;
1359 __ LoadConst64(gpr, value);
1360 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001361 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001362 __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001363 } else {
1364 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
1365 DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
1366 // Move to stack from stack
1367 if (destination.IsStackSlot()) {
1368 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1369 __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
1370 } else {
1371 __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
1372 __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
1373 }
1374 }
1375 }
1376}
1377
// Swaps the contents of `loc1` and `loc2`. `type` selects 32- vs 64-bit FP
// moves when both locations are FPU registers; otherwise the width is derived
// from the locations themselves. Constants cannot be swapped. Uses TMP/FTMP
// as scratch, so neither location may alias them.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, DataType::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_simd1 = loc1.IsSIMDStackSlot();
  bool is_simd2 = loc2.IsSIMDStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs via the TMP scratch register.
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs. With SIMD enabled, swap the full 128-bit vector registers
    // (FTMP as scratch); otherwise swap only the scalar part selected by `type`.
    if (GetGraph()->HasSIMD()) {
      __ MoveV(static_cast<VectorRegister>(FTMP), VectorRegisterFrom(loc1));
      __ MoveV(VectorRegisterFrom(loc1), VectorRegisterFrom(loc2));
      __ MoveV(VectorRegisterFrom(loc2), static_cast<VectorRegister>(FTMP));
    } else {
      FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
      FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
      if (type == DataType::Type::kFloat32) {
        __ MovS(FTMP, r1);
        __ MovS(r1, r2);
        __ MovS(r2, FTMP);
      } else {
        DCHECK_EQ(type, DataType::Type::kFloat64);
        __ MovD(FTMP, r1);
        __ MovD(r1, r2);
        __ MovD(r2, FTMP);
      }
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot: load the slot into TMP, store the register
    // into the slot, then move TMP into the register.
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      // Move the old slot contents (now in TMP) into the FPU register,
      // 32- or 64-bit depending on the slot width.
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Swap two (double) stack slots; the move resolver handles memory-memory exchange.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else if (is_simd1 && is_simd2) {
    // Swap two 128-bit SIMD stack slots.
    move_resolver_.ExchangeQuadSlots(loc1.GetStackIndex(), loc2.GetStackIndex());
  } else if ((is_fp_reg1 && is_simd2) || (is_fp_reg2 && is_simd1)) {
    // Swap an FPU (vector) register with a SIMD stack slot using FTMP as scratch.
    Location fp_reg_loc = is_fp_reg1 ? loc1 : loc2;
    Location mem_loc = is_fp_reg1 ? loc2 : loc1;
    __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, mem_loc.GetStackIndex());
    __ StoreFpuToOffset(kStoreQuadword,
                        fp_reg_loc.AsFpuRegister<FpuRegister>(),
                        SP,
                        mem_loc.GetStackIndex());
    __ MoveV(VectorRegisterFrom(fp_reg_loc), static_cast<VectorRegister>(FTMP));
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
1462
Calin Juravle175dc732015-08-25 15:42:32 +01001463void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
1464 DCHECK(location.IsRegister());
1465 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
1466}
1467
Calin Juravlee460d1d2015-09-29 04:52:17 +01001468void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1469 if (location.IsRegister()) {
1470 locations->AddTemp(location);
1471 } else {
1472 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1473 }
1474}
1475
// Emits the GC write barrier: marks the card covering `object` as dirty after
// the reference `value` has been stored into it. If `value_can_be_null` is
// true, the marking is skipped when `value` is null (no reference was stored).
// Clobbers AT and TMP.
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    // Null stores need no barrier; branch over the card marking.
    __ Beqzc(value, &done);
  }
  // Load the thread-local card table base.
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  // Index the card table: card_address = base + (object >> kCardShift).
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  // Store the low byte of the card table base as the card value; the runtime
  // arranges for that byte to equal the dirty-card marker (see CardTable),
  // which saves materializing a separate constant here.
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1496
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001497template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Alexey Frunze19f6c692016-11-30 19:19:55 -08001498inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
1499 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001500 ArenaVector<linker::LinkerPatch>* linker_patches) {
Alexey Frunze19f6c692016-11-30 19:19:55 -08001501 for (const PcRelativePatchInfo& info : infos) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001502 const DexFile* dex_file = info.target_dex_file;
Alexey Frunze19f6c692016-11-30 19:19:55 -08001503 size_t offset_or_index = info.offset_or_index;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001504 DCHECK(info.label.IsBound());
1505 uint32_t literal_offset = __ GetLabelLocation(&info.label);
1506 const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
1507 uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001508 linker_patches->push_back(Factory(literal_offset, dex_file, pc_rel_offset, offset_or_index));
Alexey Frunze19f6c692016-11-30 19:19:55 -08001509 }
1510}
1511
// Adapts a 3-argument LinkerPatch factory (one that takes no dex file, such as
// IntrinsicReferencePatch or DataBimgRelRoPatch) to the 4-argument signature
// required by EmitPcRelativeLinkerPatches<>. The dex file slot must be null
// for these patch kinds.
template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}
1520
// Collects all PC-relative patches recorded during code generation into
// `linker_patches`, in a fixed order, for the linker to resolve.
// The meaning of boot_image_method_patches_ depends on the compilation mode:
// direct method references when compiling the boot image, .data.bimg.rel.ro
// entries otherwise.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Precompute the total so a single reserve() avoids reallocation, and so we
  // can verify at the end that every recorded patch was emitted.
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_intrinsic_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    // Boot image compilation: emit direct method/type/string references and
    // intrinsic references.
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_intrinsic_patches_, linker_patches);
  } else {
    // App compilation: boot image references go through .data.bimg.rel.ro;
    // the other boot-image patch kinds must not have been recorded.
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
    DCHECK(boot_image_intrinsic_patches_.empty());
  }
  // .bss entry patches are emitted in both modes.
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1556
Vladimir Marko6fd16062018-06-26 11:02:04 +01001557CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageIntrinsicPatch(
1558 uint32_t intrinsic_data,
1559 const PcRelativePatchInfo* info_high) {
1560 return NewPcRelativePatch(
1561 /* dex_file */ nullptr, intrinsic_data, info_high, &boot_image_intrinsic_patches_);
1562}
1563
Vladimir Markob066d432018-01-03 13:14:37 +00001564CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageRelRoPatch(
1565 uint32_t boot_image_offset,
1566 const PcRelativePatchInfo* info_high) {
1567 return NewPcRelativePatch(
1568 /* dex_file */ nullptr, boot_image_offset, info_high, &boot_image_method_patches_);
1569}
1570
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001571CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001572 MethodReference target_method,
1573 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001574 return NewPcRelativePatch(
1575 target_method.dex_file, target_method.index, info_high, &boot_image_method_patches_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001576}
1577
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001578CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001579 MethodReference target_method,
1580 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001581 return NewPcRelativePatch(
1582 target_method.dex_file, target_method.index, info_high, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001583}
1584
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001585CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001586 const DexFile& dex_file,
1587 dex::TypeIndex type_index,
1588 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001589 return NewPcRelativePatch(&dex_file, type_index.index_, info_high, &boot_image_type_patches_);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001590}
1591
Vladimir Marko1998cd02017-01-13 13:02:58 +00001592CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001593 const DexFile& dex_file,
1594 dex::TypeIndex type_index,
1595 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001596 return NewPcRelativePatch(&dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001597}
1598
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001599CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001600 const DexFile& dex_file,
1601 dex::StringIndex string_index,
1602 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001603 return NewPcRelativePatch(
1604 &dex_file, string_index.index_, info_high, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001605}
1606
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001607CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
1608 const DexFile& dex_file,
1609 dex::StringIndex string_index,
1610 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001611 return NewPcRelativePatch(&dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001612}
1613
Alexey Frunze19f6c692016-11-30 19:19:55 -08001614CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001615 const DexFile* dex_file,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001616 uint32_t offset_or_index,
1617 const PcRelativePatchInfo* info_high,
1618 ArenaDeque<PcRelativePatchInfo>* patches) {
1619 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001620 return &patches->back();
1621}
1622
Alexey Frunzef63f5692016-12-13 17:43:11 -08001623Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1624 return map->GetOrCreate(
1625 value,
1626 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1627}
1628
Alexey Frunze19f6c692016-11-30 19:19:55 -08001629Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
1630 return uint64_literals_.GetOrCreate(
1631 value,
1632 [this, value]() { return __ NewLiteral<uint64_t>(value); });
1633}
1634
Alexey Frunzef63f5692016-12-13 17:43:11 -08001635Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001636 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001637}
1638
// Emits the `auipc` computing the high half of a PC-relative address into
// `out`, binding `info_high`'s label on it so the linker can patch the
// placeholder. If `info_low` is given, its label is bound at the current
// position so the immediately following instruction (emitted by the caller)
// gets the low-half patch.
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  // `info_high` must itself be a high patch, i.e. not chained to another one.
  DCHECK(!info_high->patch_info_high);
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    __ Bind(&info_low->label);
  }
}
1653
// Loads the address of a boot image object identified by `boot_image_reference`
// into `reg`. The emission strategy depends on the compilation mode:
//  - boot image compilation: PC-relative address pair patched at link time;
//  - PIC AOT app compilation: 32-bit load from the .data.bimg.rel.ro entry;
//  - JIT / non-PIC: the absolute address baked into a code literal.
// Clobbers AT in the first two cases.
void CodeGeneratorMIPS64::LoadBootImageAddress(GpuRegister reg, uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    // Link-time patched auipc + daddiu producing the object's address.
    PcRelativePatchInfo* info_high = NewBootImageIntrinsicPatch(boot_image_reference);
    PcRelativePatchInfo* info_low = NewBootImageIntrinsicPatch(boot_image_reference, info_high);
    EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
    __ Daddiu(reg, AT, /* placeholder */ 0x5678);
  } else if (GetCompilerOptions().GetCompilePic()) {
    DCHECK(Runtime::Current()->IsAotCompiler());
    // Load the address from the .data.bimg.rel.ro entry for this offset.
    PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_reference);
    PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_reference, info_high);
    EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
    // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
    __ Lwu(reg, AT, /* placeholder */ 0x5678);
  } else {
    // The boot image is mapped; compute the absolute address and load it as a literal.
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    uintptr_t address =
        reinterpret_cast<uintptr_t>(heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference);
    __ LoadLiteral(reg, kLoadDoubleword, DeduplicateBootImageAddressLiteral(address));
  }
}
1675
Vladimir Marko6fd16062018-06-26 11:02:04 +01001676void CodeGeneratorMIPS64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
1677 uint32_t boot_image_offset) {
1678 DCHECK(invoke->IsStatic());
1679 InvokeRuntimeCallingConvention calling_convention;
1680 GpuRegister argument = calling_convention.GetRegisterAt(0);
1681 if (GetCompilerOptions().IsBootImage()) {
1682 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
1683 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
1684 MethodReference target_method = invoke->GetTargetMethod();
1685 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
1686 PcRelativePatchInfo* info_high = NewBootImageTypePatch(*target_method.dex_file, type_idx);
1687 PcRelativePatchInfo* info_low =
1688 NewBootImageTypePatch(*target_method.dex_file, type_idx, info_high);
1689 EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
1690 __ Daddiu(argument, AT, /* placeholder */ 0x5678);
1691 } else {
1692 LoadBootImageAddress(argument, boot_image_offset);
1693 }
1694 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
1695 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
1696}
1697
Alexey Frunze627c1a02017-01-30 19:28:14 -08001698Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
1699 dex::StringIndex string_index,
1700 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001701 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001702 return jit_string_patches_.GetOrCreate(
1703 StringReference(&dex_file, string_index),
1704 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1705}
1706
1707Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
1708 dex::TypeIndex type_index,
1709 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001710 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001711 return jit_class_patches_.GetOrCreate(
1712 TypeReference(&dex_file, type_index),
1713 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1714}
1715
1716void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
1717 const uint8_t* roots_data,
1718 const Literal* literal,
1719 uint64_t index_in_table) const {
1720 uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
1721 uintptr_t address =
1722 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1723 reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
1724}
1725
1726void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1727 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001728 const StringReference& string_reference = entry.first;
1729 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001730 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001731 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001732 }
1733 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001734 const TypeReference& type_reference = entry.first;
1735 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001736 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001737 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001738 }
1739}
1740
// Marks the registers the register allocator must never hand out:
// ABI-reserved registers, assembler scratch registers, the runtime's
// dedicated registers, and — for debuggable graphs — the FP callee-saves.
void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  if (GetInstructionSetFeatures().HasMsa()) {
    // To be used just for MSA instructions.
    blocked_fpu_registers_[FTMP2] = true;
  }

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}
1778
Alexey Frunze4dda3372015-06-01 18:31:49 -07001779size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1780 __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001781 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001782}
1783
1784size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1785 __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001786 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001787}
1788
1789size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001790 __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
1791 FpuRegister(reg_id),
1792 SP,
1793 stack_index);
1794 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001795}
1796
1797size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001798 __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
1799 FpuRegister(reg_id),
1800 SP,
1801 stack_index);
1802 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001803}
1804
1805void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001806 stream << GpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001807}
1808
1809void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001810 stream << FpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001811}
1812
Vladimir Markoa0431112018-06-25 09:32:54 +01001813const Mips64InstructionSetFeatures& CodeGeneratorMIPS64::GetInstructionSetFeatures() const {
1814 return *GetCompilerOptions().GetInstructionSetFeatures()->AsMips64InstructionSetFeatures();
1815}
1816
// Emits a call to the given quick runtime entrypoint and, when that
// entrypoint requires one, records the stack map (PC info) for `instruction`
// at `dex_pc`. `slow_path` may be null for calls emitted on the fast path.
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  // The entrypoint address lives at a fixed, per-entrypoint offset from the
  // Thread pointer (TR); GenerateInvokeRuntime loads and calls it.
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1827
// Emits a runtime call for which no stack map is recorded; the validation
// call below checks that this is legal for `instruction`/`slow_path`.
void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1834
// Loads the entrypoint address from the Thread register (TR) at the given
// offset and calls it through T9, the conventional MIPS call register.
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();  // Fill the delay slot of the Jalr.
}
1840
// Emits a fast check that the class in `class_reg` is initialized, branching
// to `slow_path` when it is not. Control falls through at the slow path's
// exit label once the class is known to be initialized.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  // The ClassStatus is packed in the bits of the 32-bit status word above the
  // SubtypeCheckBits, so only the byte holding it needs to be loaded.
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  __ LoadFromOffset(kLoadUnsignedByte, TMP, class_reg, status_byte_offset);
  // TMP = (status byte < kInitialized) ? 1 : 0 — i.e. non-zero when the class
  // is not yet initialized.
  __ Sltiu(TMP, TMP, shifted_initialized_value);
  __ Bnezc(TMP, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1856
// Compares the type-check bitstring stored in the class pointed to by `temp`
// against the expected path-to-root of `check`. On exit, `temp` is zero iff
// the bits selected by the mask equal `path_to_root`; callers branch on that.
void InstructionCodeGeneratorMIPS64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       GpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  // The mask must be of the form 2^k - 1 (a contiguous run of low bits).
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Load only the bitstring part of the status word.
    __ LoadFromOffset(
        kLoadUnsignedHalfword, temp, temp, mirror::Class::StatusOffset().Int32Value());
    // Compare the bitstring bits using XOR.
    __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ LoadFromOffset(kLoadWord, temp, temp, mirror::Class::StatusOffset().Int32Value());
    // Compare the bitstring bits using XOR. Xori takes a 16-bit immediate;
    // larger constants must be materialized in TMP first.
    if (IsUint<16>(path_to_root)) {
      __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
    } else {
      __ LoadConst32(TMP, path_to_root);
      __ Xor(temp, temp, TMP);
    }
    // Shift out bits that do not contribute to the comparison.
    __ Sll(temp, temp, 32 - mask_bits);
  }
}
1884
// Emits a memory barrier. The requested kind is ignored: a SYNC with
// stype 0 (the only supported variant here) is emitted unconditionally.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1888
// Emits a suspend check: tests the thread-flags halfword at a fixed offset
// from the thread register (TR) and enters a SuspendCheckSlowPathMIPS64 when
// any flag is set. The slow path object is cached on the HSuspendCheck so
// that multiple emissions for the same instruction share it.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      down_cast<SuspendCheckSlowPathMIPS64*>(instruction->GetSlowPath());

  if (slow_path == nullptr) {
    // First emission for this instruction: allocate and register the slow path.
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathMIPS64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    // No explicit successor: branch to the slow path if flags are set and
    // resume here afterwards.
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // With a successor (loop back edge): fall through to the successor when
    // no flags are set, otherwise take the slow path.
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1919
// Constructs the per-graph instruction visitor that emits MIPS64 code
// through the owning code generator's assembler.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1925
1926void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1927 DCHECK_EQ(instruction->InputCount(), 2U);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001928 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001929 DataType::Type type = instruction->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001930 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001931 case DataType::Type::kInt32:
1932 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001933 locations->SetInAt(0, Location::RequiresRegister());
1934 HInstruction* right = instruction->InputAt(1);
1935 bool can_use_imm = false;
1936 if (right->IsConstant()) {
1937 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
1938 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1939 can_use_imm = IsUint<16>(imm);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001940 } else {
Lena Djokic38530172017-11-16 11:11:50 +01001941 DCHECK(instruction->IsAdd() || instruction->IsSub());
1942 bool single_use = right->GetUses().HasExactlyOneElement();
1943 if (instruction->IsSub()) {
1944 if (!(type == DataType::Type::kInt32 && imm == INT32_MIN)) {
1945 imm = -imm;
1946 }
1947 }
1948 if (type == DataType::Type::kInt32) {
1949 can_use_imm = IsInt<16>(imm) || (Low16Bits(imm) == 0) || single_use;
1950 } else {
1951 can_use_imm = IsInt<16>(imm) || (IsInt<32>(imm) && (Low16Bits(imm) == 0)) || single_use;
1952 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001953 }
1954 }
1955 if (can_use_imm)
1956 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1957 else
1958 locations->SetInAt(1, Location::RequiresRegister());
1959 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1960 }
1961 break;
1962
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001963 case DataType::Type::kFloat32:
1964 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07001965 locations->SetInAt(0, Location::RequiresFpuRegister());
1966 locations->SetInAt(1, Location::RequiresFpuRegister());
1967 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1968 break;
1969
1970 default:
1971 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1972 }
1973}
1974
// Emits code for Add, Sub, And, Or and Xor. Whether the right-hand side is a
// register or an inlined immediate was decided by
// LocationsBuilderMIPS64::HandleBinaryOp.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      // Logical operations: the immediate forms take 16-bit operands.
      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd() || instruction->IsSub()) {
        if (instruction->IsSub()) {
          // Subtraction of an immediate is emitted as addition of its negation.
          rhs_imm = -rhs_imm;
        }
        if (type == DataType::Type::kInt32) {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Addiu(dst, lhs, rhs_imm);
            } else {
              // Split the 32-bit immediate into halves: Aui adds the high
              // half shifted by 16. Addiu sign-extends a negative low half,
              // so compensate by incrementing the high half.
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
              }
              __ Aui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Addiu(dst, dst, rhs_imm_low);
              }
            }
          } else {
            if (instruction->IsAdd()) {
              __ Addu(dst, lhs, rhs_reg);
            } else {
              DCHECK(instruction->IsSub());
              __ Subu(dst, lhs, rhs_reg);
            }
          }
        } else {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Daddiu(dst, lhs, rhs_imm);
            } else if (IsInt<32>(rhs_imm)) {
              // 32-bit immediate: Daui + Daddiu, with the same negative-low
              // compensation as above. Daui sign-extends into the upper
              // 32 bits, so if incrementing the high half wrapped to -32768,
              // add back 1 << 32 with Dahi.
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              bool overflow_hi16 = false;
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
                overflow_hi16 = (rhs_imm_high == -32768);
              }
              __ Daui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, dst, rhs_imm_low);
              }
              if (overflow_hi16) {
                __ Dahi(dst, 1);
              }
            } else {
              // General 64-bit immediate: add it 16 bits at a time with
              // Daddiu/Daui/Dahi/Dati. Each half-word is sign-extended by its
              // instruction, so a negative half is pre-compensated by folding
              // a carry into the next half-word of rhs_imm before extracting
              // it.
              int16_t rhs_imm_low = Low16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_low < 0) {
                rhs_imm += (INT64_C(1) << 16);
              }
              int16_t rhs_imm_upper = High16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_upper < 0) {
                rhs_imm += (INT64_C(1) << 32);
              }
              int16_t rhs_imm_high = Low16Bits(High32Bits(rhs_imm));
              if (rhs_imm_high < 0) {
                rhs_imm += (INT64_C(1) << 48);
              }
              int16_t rhs_imm_top = High16Bits(High32Bits(rhs_imm));
              GpuRegister tmp = lhs;
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, tmp, rhs_imm_low);
                tmp = dst;
              }
              // Dahi and Dati must use the same input and output register, so we have to initialize
              // the dst register using Daddiu or Daui, even when the intermediate value is zero:
              // Daui(dst, lhs, 0).
              if ((rhs_imm_upper != 0) || (rhs_imm_low == 0)) {
                __ Daui(dst, tmp, rhs_imm_upper);
              }
              if (rhs_imm_high != 0) {
                __ Dahi(dst, rhs_imm_high);
              }
              if (rhs_imm_top != 0) {
                __ Dati(dst, rhs_imm_top);
              }
            }
          } else if (instruction->IsAdd()) {
            __ Daddu(dst, lhs, rhs_reg);
          } else {
            DCHECK(instruction->IsSub());
            __ Dsubu(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      // Floating point: only Add and Sub reach here; pick the single- or
      // double-precision instruction based on the result type.
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == DataType::Type::kFloat32)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == DataType::Type::kFloat32)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2122
2123void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002124 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002125
Vladimir Markoca6fff82017-10-03 14:49:14 +01002126 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002127 DataType::Type type = instr->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002128 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002129 case DataType::Type::kInt32:
2130 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002131 locations->SetInAt(0, Location::RequiresRegister());
2132 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002133 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002134 break;
2135 }
2136 default:
2137 LOG(FATAL) << "Unexpected shift type " << type;
2138 }
2139}
2140
// Emits code for shift and rotate operations (Shl, Shr, UShr, Ror) on int
// and long values, selecting between immediate-distance and
// register-distance (variable) instruction forms.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Constant distances are masked to the type's maximum shift distance.
        uint32_t shift_value = rhs_imm &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero is a plain move (elided when dst == lhs).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          // 64-bit shifts encode distances 0-31 and 32-63 with separate
          // instructions; the "...32" variants add 32 to the encoded distance.
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        // Variable distance: the "...v" instruction forms take the distance
        // in a register.
        if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2233
// HAdd shares its location setup with the other binary operations.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2237
// HAdd shares its code emission with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2241
// HAnd shares its location setup with the other binary operations.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2245
// HAnd shares its code emission with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2249
// Sets up operand locations for an array element load. Object-array loads
// with read barriers may call into a slow path and need extra bookkeeping.
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0: array reference; input 1: index (register or constant).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // No temp is needed when the corresponding Baker read barrier thunk
    // variant (field thunk for constant indices, array thunk otherwise)
    // is enabled.
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2286
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002287static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2288 auto null_checker = [codegen, instruction]() {
2289 codegen->MaybeRecordImplicitNullCheck(instruction);
2290 };
2291 return null_checker;
2292}
2293
Alexey Frunze4dda3372015-06-01 18:31:49 -07002294void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
2295 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002296 Location obj_loc = locations->InAt(0);
2297 GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
2298 Location out_loc = locations->Out();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002299 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002300 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002301 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002302
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002303 DataType::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002304 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2305 instruction->IsStringCharAt();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002306 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002307 case DataType::Type::kBool:
2308 case DataType::Type::kUint8: {
Alexey Frunze15958152017-02-09 19:08:30 -08002309 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002310 if (index.IsConstant()) {
2311 size_t offset =
2312 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002313 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002314 } else {
2315 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002316 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002317 }
2318 break;
2319 }
2320
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002321 case DataType::Type::kInt8: {
Alexey Frunze15958152017-02-09 19:08:30 -08002322 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002323 if (index.IsConstant()) {
2324 size_t offset =
2325 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002326 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002327 } else {
2328 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002329 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002330 }
2331 break;
2332 }
2333
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002334 case DataType::Type::kUint16: {
Alexey Frunze15958152017-02-09 19:08:30 -08002335 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002336 if (maybe_compressed_char_at) {
2337 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002338 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002339 __ Dext(TMP, TMP, 0, 1);
2340 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2341 "Expecting 0=compressed, 1=uncompressed");
2342 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002343 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002344 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2345 if (maybe_compressed_char_at) {
2346 Mips64Label uncompressed_load, done;
2347 __ Bnezc(TMP, &uncompressed_load);
2348 __ LoadFromOffset(kLoadUnsignedByte,
2349 out,
2350 obj,
2351 data_offset + (const_index << TIMES_1));
2352 __ Bc(&done);
2353 __ Bind(&uncompressed_load);
2354 __ LoadFromOffset(kLoadUnsignedHalfword,
2355 out,
2356 obj,
2357 data_offset + (const_index << TIMES_2));
2358 __ Bind(&done);
2359 } else {
2360 __ LoadFromOffset(kLoadUnsignedHalfword,
2361 out,
2362 obj,
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002363 data_offset + (const_index << TIMES_2),
2364 null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002365 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002366 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002367 GpuRegister index_reg = index.AsRegister<GpuRegister>();
2368 if (maybe_compressed_char_at) {
2369 Mips64Label uncompressed_load, done;
2370 __ Bnezc(TMP, &uncompressed_load);
2371 __ Daddu(TMP, obj, index_reg);
2372 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2373 __ Bc(&done);
2374 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002375 __ Dlsa(TMP, index_reg, obj, TIMES_2);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002376 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2377 __ Bind(&done);
2378 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002379 __ Dlsa(TMP, index_reg, obj, TIMES_2);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002380 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002381 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002382 }
2383 break;
2384 }
2385
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002386 case DataType::Type::kInt16: {
2387 GpuRegister out = out_loc.AsRegister<GpuRegister>();
2388 if (index.IsConstant()) {
2389 size_t offset =
2390 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2391 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
2392 } else {
2393 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
2394 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
2395 }
2396 break;
2397 }
2398
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002399 case DataType::Type::kInt32: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002400 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002401 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002402 LoadOperandType load_type =
2403 (type == DataType::Type::kReference) ? kLoadUnsignedWord : kLoadWord;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002404 if (index.IsConstant()) {
2405 size_t offset =
2406 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002407 __ LoadFromOffset(load_type, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002408 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002409 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002410 __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002411 }
2412 break;
2413 }
2414
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002415 case DataType::Type::kReference: {
Alexey Frunze15958152017-02-09 19:08:30 -08002416 static_assert(
2417 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2418 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2419 // /* HeapReference<Object> */ out =
2420 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2421 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002422 bool temp_needed = index.IsConstant()
2423 ? !kBakerReadBarrierThunksEnableForFields
2424 : !kBakerReadBarrierThunksEnableForArrays;
2425 Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze15958152017-02-09 19:08:30 -08002426 // Note that a potential implicit null check is handled in this
2427 // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002428 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
2429 if (index.IsConstant()) {
2430 // Array load with a constant index can be treated as a field load.
2431 size_t offset =
2432 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2433 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2434 out_loc,
2435 obj,
2436 offset,
2437 temp,
2438 /* needs_null_check */ false);
2439 } else {
2440 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2441 out_loc,
2442 obj,
2443 data_offset,
2444 index,
2445 temp,
2446 /* needs_null_check */ false);
2447 }
Alexey Frunze15958152017-02-09 19:08:30 -08002448 } else {
2449 GpuRegister out = out_loc.AsRegister<GpuRegister>();
2450 if (index.IsConstant()) {
2451 size_t offset =
2452 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2453 __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
2454 // If read barriers are enabled, emit read barriers other than
2455 // Baker's using a slow path (and also unpoison the loaded
2456 // reference, if heap poisoning is enabled).
2457 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2458 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002459 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunze15958152017-02-09 19:08:30 -08002460 __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
2461 // If read barriers are enabled, emit read barriers other than
2462 // Baker's using a slow path (and also unpoison the loaded
2463 // reference, if heap poisoning is enabled).
2464 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2465 out_loc,
2466 out_loc,
2467 obj_loc,
2468 data_offset,
2469 index);
2470 }
2471 }
2472 break;
2473 }
2474
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002475 case DataType::Type::kInt64: {
Alexey Frunze15958152017-02-09 19:08:30 -08002476 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002477 if (index.IsConstant()) {
2478 size_t offset =
2479 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002480 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002481 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002482 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002483 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002484 }
2485 break;
2486 }
2487
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002488 case DataType::Type::kFloat32: {
Alexey Frunze15958152017-02-09 19:08:30 -08002489 FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002490 if (index.IsConstant()) {
2491 size_t offset =
2492 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002493 __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002494 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002495 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002496 __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002497 }
2498 break;
2499 }
2500
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002501 case DataType::Type::kFloat64: {
Alexey Frunze15958152017-02-09 19:08:30 -08002502 FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002503 if (index.IsConstant()) {
2504 size_t offset =
2505 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002506 __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002507 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002508 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002509 __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002510 }
2511 break;
2512 }
2513
Aart Bik66c158e2018-01-31 12:55:04 -08002514 case DataType::Type::kUint32:
2515 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002516 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -07002517 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2518 UNREACHABLE();
2519 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002520}
2521
2522void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002523 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002524 locations->SetInAt(0, Location::RequiresRegister());
2525 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2526}
2527
2528void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
2529 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002530 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002531 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2532 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2533 __ LoadFromOffset(kLoadWord, out, obj, offset);
2534 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002535 // Mask out compression flag from String's array length.
2536 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2537 __ Srl(out, out, 1u);
2538 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002539}
2540
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002541Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2542 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2543 ? Location::ConstantLocation(instruction->AsConstant())
2544 : Location::RequiresRegister();
2545}
2546
2547Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2548 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2549 // We can store a non-zero float or double constant without first loading it into the FPU,
2550 // but we should only prefer this if the constant has a single use.
2551 if (instruction->IsConstant() &&
2552 (instruction->AsConstant()->IsZeroBitPattern() ||
2553 instruction->GetUses().HasExactlyOneElement())) {
2554 return Location::ConstantLocation(instruction->AsConstant());
2555 // Otherwise fall through and require an FPU register for the constant.
2556 }
2557 return Location::RequiresFpuRegister();
2558}
2559
Alexey Frunze4dda3372015-06-01 18:31:49 -07002560void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002561 DataType::Type value_type = instruction->GetComponentType();
Alexey Frunze15958152017-02-09 19:08:30 -08002562
2563 bool needs_write_barrier =
2564 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2565 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2566
Vladimir Markoca6fff82017-10-03 14:49:14 +01002567 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Alexey Frunze4dda3372015-06-01 18:31:49 -07002568 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002569 may_need_runtime_call_for_type_check ?
2570 LocationSummary::kCallOnSlowPath :
2571 LocationSummary::kNoCall);
2572
2573 locations->SetInAt(0, Location::RequiresRegister());
2574 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002575 if (DataType::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
Alexey Frunze15958152017-02-09 19:08:30 -08002576 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002577 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002578 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2579 }
2580 if (needs_write_barrier) {
2581 // Temporary register for the write barrier.
2582 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07002583 }
2584}
2585
// Generates code for an ArraySet: computes the element address from the
// (possibly constant) index, then stores the value (register, FPU register,
// or constant). Reference stores additionally perform the optional runtime
// type check, reference poisoning, and the card-marking write barrier.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index the element offset is folded into `data_offset` and
  // `obj` is addressed directly; otherwise TMP is set to obj + scaled index.
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    // 8-bit elements: address is obj + index (no scaling needed).
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        // Constant values are stored via TMP inside StoreConstToOffset.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    // 16-bit elements: index scaled by 2 (Dlsa shifts-and-adds).
    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    // 32-bit integer elements: index scaled by 4.
    case DataType::Type::kInt32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    // Reference elements: may need a runtime type check, poisoning and the
    // card-marking write barrier.
    case DataType::Type::kReference: {
      if (value_location.IsConstant()) {
        // Just setting null: no type check and no write barrier are required
        // (the locations builder only allows the zero constant here).
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // A null value can always be stored without any type check; branch
          // around the check for that case.
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ Bc(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers.  This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Exact match between component type and value class lets us store
          // directly; otherwise accept Object[] when the value's superclass
          // chain reaches Object (super_class_ == null).
          Mips64Label do_put;
          __ Beqc(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnezc(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      GpuRegister source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      // No null_checker here: the implicit null check, if any, is recorded
      // explicitly below only when no type-check slow path was emitted.
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    // 64-bit integer elements: index scaled by 8.
    case DataType::Type::kInt64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    // 32-bit FP elements: constants are stored from a core register.
    case DataType::Type::kFloat32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    // 64-bit FP elements.
    case DataType::Type::kFloat64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    // These types never appear as array component types.
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2826
2827void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002828 RegisterSet caller_saves = RegisterSet::Empty();
2829 InvokeRuntimeCallingConvention calling_convention;
2830 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2831 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2832 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicdbd43032017-11-15 16:31:56 +01002833
2834 HInstruction* index = instruction->InputAt(0);
2835 HInstruction* length = instruction->InputAt(1);
2836
2837 bool const_index = false;
2838 bool const_length = false;
2839
2840 if (index->IsConstant()) {
2841 if (length->IsConstant()) {
2842 const_index = true;
2843 const_length = true;
2844 } else {
2845 int32_t index_value = index->AsIntConstant()->GetValue();
2846 if (index_value < 0 || IsInt<16>(index_value + 1)) {
2847 const_index = true;
2848 }
2849 }
2850 } else if (length->IsConstant()) {
2851 int32_t length_value = length->AsIntConstant()->GetValue();
2852 if (IsUint<15>(length_value)) {
2853 const_length = true;
2854 }
2855 }
2856
2857 locations->SetInAt(0, const_index
2858 ? Location::ConstantLocation(index->AsConstant())
2859 : Location::RequiresRegister());
2860 locations->SetInAt(1, const_length
2861 ? Location::ConstantLocation(length->AsConstant())
2862 : Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002863}
2864
// Emits the bounds check, specializing on which of index/length is a
// compile-time constant (as decided by the locations builder above).
void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);

  if (length_loc.IsConstant()) {
    int32_t length = length_loc.GetConstant()->AsIntConstant()->GetValue();
    if (index_loc.IsConstant()) {
      // Both constant: the outcome is known now - emit either an
      // unconditional jump to the slow path or nothing at all.
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0 || index >= length) {
        BoundsCheckSlowPathMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        __ Bc(slow_path->GetEntryLabel());
      } else {
        // Nothing to be done.
      }
      return;
    }

    // Constant length, index in a register.
    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    GpuRegister index = index_loc.AsRegister<GpuRegister>();
    if (length == 0) {
      // Every index is out of bounds.
      __ Bc(slow_path->GetEntryLabel());
    } else if (length == 1) {
      // Only index 0 is in bounds (index is treated as unsigned).
      __ Bnezc(index, slow_path->GetEntryLabel());
    } else {
      // Unsigned compare covers the negative-index case as well; the
      // locations builder guarantees the immediate fits (IsUint<15>).
      DCHECK(IsUint<15>(length)) << length;
      __ Sltiu(TMP, index, length);
      __ Beqzc(TMP, slow_path->GetEntryLabel());
    }
  } else {
    GpuRegister length = length_loc.AsRegister<GpuRegister>();
    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    if (index_loc.IsConstant()) {
      // Constant index, length in a register.
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0) {
        // A negative index always throws.
        __ Bc(slow_path->GetEntryLabel());
      } else if (index == 0) {
        // In bounds iff length > 0.
        __ Blezc(length, slow_path->GetEntryLabel());
      } else {
        // In bounds iff length >= index + 1, i.e. !(length < index + 1); the
        // locations builder guarantees the immediate fits (IsInt<16>).
        DCHECK(IsInt<16>(index + 1)) << index;
        __ Sltiu(TMP, length, index + 1);
        __ Bnezc(TMP, slow_path->GetEntryLabel());
      }
    } else {
      // Both in registers: one unsigned compare-and-branch handles negative
      // indices too.
      GpuRegister index = index_loc.AsRegister<GpuRegister>();
      __ Bgeuc(index, length, slow_path->GetEntryLabel());
    }
  }
}
2920
Alexey Frunze15958152017-02-09 19:08:30 -08002921// Temp is used for read barrier.
2922static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2923 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002924 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08002925 (kUseBakerReadBarrier ||
2926 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2927 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2928 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2929 return 1;
2930 }
2931 return 0;
2932}
2933
2934// Extra temp is used for read barrier.
2935static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2936 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2937}
2938
Alexey Frunze4dda3372015-06-01 18:31:49 -07002939void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002940 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzedfc30af2018-01-24 16:25:10 -08002941 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01002942 LocationSummary* locations =
2943 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002944 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00002945 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
2946 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
2947 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2948 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
2949 } else {
2950 locations->SetInAt(1, Location::RequiresRegister());
2951 }
Alexey Frunze15958152017-02-09 19:08:30 -08002952 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002953}
2954
2955void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002956 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002957 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002958 Location obj_loc = locations->InAt(0);
2959 GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
Vladimir Marko175e7862018-03-27 09:03:13 +00002960 Location cls = locations->InAt(1);
Alexey Frunze15958152017-02-09 19:08:30 -08002961 Location temp_loc = locations->GetTemp(0);
2962 GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
2963 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
2964 DCHECK_LE(num_temps, 2u);
2965 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002966 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2967 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2968 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2969 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
2970 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
2971 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
2972 const uint32_t object_array_data_offset =
2973 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
2974 Mips64Label done;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002975
Alexey Frunzedfc30af2018-01-24 16:25:10 -08002976 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002977 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01002978 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
2979 instruction, is_type_check_slow_path_fatal);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002980 codegen_->AddSlowPath(slow_path);
2981
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002982 // Avoid this check if we know `obj` is not null.
2983 if (instruction->MustDoNullCheck()) {
2984 __ Beqzc(obj, &done);
2985 }
2986
2987 switch (type_check_kind) {
2988 case TypeCheckKind::kExactCheck:
2989 case TypeCheckKind::kArrayCheck: {
2990 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08002991 GenerateReferenceLoadTwoRegisters(instruction,
2992 temp_loc,
2993 obj_loc,
2994 class_offset,
2995 maybe_temp2_loc,
2996 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002997 // Jump to slow path for throwing the exception or doing a
2998 // more involved array check.
Vladimir Marko175e7862018-03-27 09:03:13 +00002999 __ Bnec(temp, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003000 break;
3001 }
3002
3003 case TypeCheckKind::kAbstractClassCheck: {
3004 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003005 GenerateReferenceLoadTwoRegisters(instruction,
3006 temp_loc,
3007 obj_loc,
3008 class_offset,
3009 maybe_temp2_loc,
3010 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003011 // If the class is abstract, we eagerly fetch the super class of the
3012 // object to avoid doing a comparison we know will fail.
3013 Mips64Label loop;
3014 __ Bind(&loop);
3015 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003016 GenerateReferenceLoadOneRegister(instruction,
3017 temp_loc,
3018 super_offset,
3019 maybe_temp2_loc,
3020 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003021 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3022 // exception.
3023 __ Beqzc(temp, slow_path->GetEntryLabel());
3024 // Otherwise, compare the classes.
Vladimir Marko175e7862018-03-27 09:03:13 +00003025 __ Bnec(temp, cls.AsRegister<GpuRegister>(), &loop);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003026 break;
3027 }
3028
3029 case TypeCheckKind::kClassHierarchyCheck: {
3030 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003031 GenerateReferenceLoadTwoRegisters(instruction,
3032 temp_loc,
3033 obj_loc,
3034 class_offset,
3035 maybe_temp2_loc,
3036 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003037 // Walk over the class hierarchy to find a match.
3038 Mips64Label loop;
3039 __ Bind(&loop);
Vladimir Marko175e7862018-03-27 09:03:13 +00003040 __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003041 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003042 GenerateReferenceLoadOneRegister(instruction,
3043 temp_loc,
3044 super_offset,
3045 maybe_temp2_loc,
3046 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003047 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3048 // exception. Otherwise, jump to the beginning of the loop.
3049 __ Bnezc(temp, &loop);
3050 __ Bc(slow_path->GetEntryLabel());
3051 break;
3052 }
3053
3054 case TypeCheckKind::kArrayObjectCheck: {
3055 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003056 GenerateReferenceLoadTwoRegisters(instruction,
3057 temp_loc,
3058 obj_loc,
3059 class_offset,
3060 maybe_temp2_loc,
3061 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003062 // Do an exact check.
Vladimir Marko175e7862018-03-27 09:03:13 +00003063 __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003064 // Otherwise, we need to check that the object's class is a non-primitive array.
3065 // /* HeapReference<Class> */ temp = temp->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08003066 GenerateReferenceLoadOneRegister(instruction,
3067 temp_loc,
3068 component_offset,
3069 maybe_temp2_loc,
3070 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003071 // If the component type is null, jump to the slow path to throw the exception.
3072 __ Beqzc(temp, slow_path->GetEntryLabel());
3073 // Otherwise, the object is indeed an array, further check that this component
3074 // type is not a primitive type.
3075 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
3076 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3077 __ Bnezc(temp, slow_path->GetEntryLabel());
3078 break;
3079 }
3080
3081 case TypeCheckKind::kUnresolvedCheck:
3082 // We always go into the type check slow path for the unresolved check case.
3083 // We cannot directly call the CheckCast runtime entry point
3084 // without resorting to a type checking slow path here (i.e. by
3085 // calling InvokeRuntime directly), as it would require to
3086 // assign fixed registers for the inputs of this HInstanceOf
3087 // instruction (following the runtime calling convention), which
3088 // might be cluttered by the potential first read barrier
3089 // emission at the beginning of this method.
3090 __ Bc(slow_path->GetEntryLabel());
3091 break;
3092
3093 case TypeCheckKind::kInterfaceCheck: {
3094 // Avoid read barriers to improve performance of the fast path. We can not get false
3095 // positives by doing this.
3096 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003097 GenerateReferenceLoadTwoRegisters(instruction,
3098 temp_loc,
3099 obj_loc,
3100 class_offset,
3101 maybe_temp2_loc,
3102 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003103 // /* HeapReference<Class> */ temp = temp->iftable_
Alexey Frunze15958152017-02-09 19:08:30 -08003104 GenerateReferenceLoadTwoRegisters(instruction,
3105 temp_loc,
3106 temp_loc,
3107 iftable_offset,
3108 maybe_temp2_loc,
3109 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003110 // Iftable is never null.
3111 __ Lw(TMP, temp, array_length_offset);
3112 // Loop through the iftable and check if any class matches.
3113 Mips64Label loop;
3114 __ Bind(&loop);
3115 __ Beqzc(TMP, slow_path->GetEntryLabel());
3116 __ Lwu(AT, temp, object_array_data_offset);
3117 __ MaybeUnpoisonHeapReference(AT);
3118 // Go to next interface.
3119 __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
3120 __ Addiu(TMP, TMP, -2);
3121 // Compare the classes and continue the loop if they do not match.
Vladimir Marko175e7862018-03-27 09:03:13 +00003122 __ Bnec(AT, cls.AsRegister<GpuRegister>(), &loop);
3123 break;
3124 }
3125
3126 case TypeCheckKind::kBitstringCheck: {
3127 // /* HeapReference<Class> */ temp = obj->klass_
3128 GenerateReferenceLoadTwoRegisters(instruction,
3129 temp_loc,
3130 obj_loc,
3131 class_offset,
3132 maybe_temp2_loc,
3133 kWithoutReadBarrier);
3134
3135 GenerateBitstringTypeCheckCompare(instruction, temp);
3136 __ Bnezc(temp, slow_path->GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003137 break;
3138 }
3139 }
3140
3141 __ Bind(&done);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003142 __ Bind(slow_path->GetExitLabel());
3143}
3144
3145void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
3146 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003147 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003148 locations->SetInAt(0, Location::RequiresRegister());
3149 if (check->HasUses()) {
3150 locations->SetOut(Location::SameAsFirstInput());
3151 }
3152}
3153
3154void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
3155 // We assume the class is not null.
Vladimir Marko174b2e22017-10-12 13:34:49 +01003156 SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
Alexey Frunze4dda3372015-06-01 18:31:49 -07003157 check->GetLoadClass(),
3158 check,
3159 check->GetDexPc(),
3160 true);
3161 codegen_->AddSlowPath(slow_path);
3162 GenerateClassInitializationCheck(slow_path,
3163 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
3164}
3165
// Sets up locations for HCompare. Integral inputs allow a constant
// right-hand side; floating-point inputs require two FPU registers.
// The -1/0/1 result always goes into a core register.
void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
  DataType::Type in_type = compare->InputAt(0)->GetType();

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(compare);

  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      // Integral compare: a constant rhs can be folded into the code.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      // FP compare: both operands in FPU registers, result in a core register.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}
3195
// Materializes an HCompare: writes -1, 0, or 1 into the output register
// depending on whether lhs < rhs, lhs == rhs, or lhs > rhs. For FP inputs
// the gt/lt bias chooses the result when an operand is NaN (unordered).
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  DataType::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      GpuRegister rhs = ZERO;
      if (use_imm) {
        // A non-zero constant rhs is materialized into AT; zero uses the
        // ZERO register directly and skips the load.
        if (in_type == DataType::Type::kInt64) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), i.e. the sign of the comparison.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal operands yield 0.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // Gt bias: test lhs < rhs; if neither eq nor lt holds (including
        // the unordered NaN case), the result falls through to 1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // Lt bias: test rhs < lhs; the unordered case falls through to -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case DataType::Type::kFloat64: {
      // Same scheme as kFloat32, using the double-precision compares.
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3287
// Sets up locations for a condition. Note the `default:` label deliberately
// shares the integral path with kInt64: every non-FP input type allows a
// constant right-hand side.
void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  switch (instruction->InputAt(0)->GetType()) {
    default:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      break;
  }
  // Only a materialized condition produces a value in a register; one
  // emitted at its use site generates no code of its own.
  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
3307
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003308void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003309 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003310 return;
3311 }
3312
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003313 DataType::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003314 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08003315 switch (type) {
3316 default:
3317 // Integer case.
3318 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
3319 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003320 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003321 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
3322 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003323 case DataType::Type::kFloat32:
3324 case DataType::Type::kFloat64:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01003325 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3326 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003327 }
3328}
3329
Alexey Frunzec857c742015-09-23 15:12:39 -07003330void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3331 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003332 DataType::Type type = instruction->GetResultType();
Alexey Frunzec857c742015-09-23 15:12:39 -07003333
3334 LocationSummary* locations = instruction->GetLocations();
3335 Location second = locations->InAt(1);
3336 DCHECK(second.IsConstant());
3337
3338 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3339 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3340 int64_t imm = Int64FromConstant(second.GetConstant());
3341 DCHECK(imm == 1 || imm == -1);
3342
3343 if (instruction->IsRem()) {
3344 __ Move(out, ZERO);
3345 } else {
3346 if (imm == -1) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003347 if (type == DataType::Type::kInt32) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003348 __ Subu(out, ZERO, dividend);
3349 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003350 DCHECK_EQ(type, DataType::Type::kInt64);
Alexey Frunzec857c742015-09-23 15:12:39 -07003351 __ Dsubu(out, ZERO, dividend);
3352 }
3353 } else if (out != dividend) {
3354 __ Move(out, dividend);
3355 }
3356 }
3357}
3358
// Emits code for division or remainder by a (possibly negative) power of
// two. Negative dividends are biased before shifting/truncating so the
// result rounds toward zero, matching Java division semantics.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  // The unsigned cast keeps CTZ well-defined for the most-negative value.
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        // Bias: TMP = (dividend < 0) ? abs_imm - 1 : 0.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        // Bias: TMP = (dividend < 0) ? abs_imm - 1 : 0, using the *32
        // instruction forms when the shift amount is at least 32.
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    // Remainder. The result's sign follows the dividend only, so a
    // negative divisor needs no extra handling on this path.
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        // Keep only the low ctz_imm bits of the biased value (insert
        // zeros above them), then remove the bias.
        __ Ins(out, ZERO, ctz_imm, 32 - ctz_imm);
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        // 64-bit analogue of the Ins above.
        __ DblIns(out, ZERO, ctz_imm, 64 - ctz_imm);
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3447
// Emits code for division or remainder by an arbitrary non-zero, non-power-
// of-two constant using the "magic number" multiply-high technique
// (Hacker's Delight, ch. 10).
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == DataType::Type::kInt64),
                                  &magic,
                                  &shift);

  if (type == DataType::Type::kInt32) {
    // TMP = high 32 bits of (dividend * magic).
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    // Correct for a sign mismatch between the divisor and the magic value.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // Add the sign bit so the quotient rounds toward zero.
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // Remainder: out = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    // 64-bit variant of the sequence above.
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3521
3522void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3523 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003524 DataType::Type type = instruction->GetResultType();
3525 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Alexey Frunzec857c742015-09-23 15:12:39 -07003526
3527 LocationSummary* locations = instruction->GetLocations();
3528 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3529 Location second = locations->InAt(1);
3530
3531 if (second.IsConstant()) {
3532 int64_t imm = Int64FromConstant(second.GetConstant());
3533 if (imm == 0) {
3534 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3535 } else if (imm == 1 || imm == -1) {
3536 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003537 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003538 DivRemByPowerOfTwo(instruction);
3539 } else {
3540 DCHECK(imm <= -2 || imm >= 2);
3541 GenerateDivRemWithAnyConstant(instruction);
3542 }
3543 } else {
3544 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3545 GpuRegister divisor = second.AsRegister<GpuRegister>();
3546 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003547 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003548 __ DivR6(out, dividend, divisor);
3549 else
3550 __ Ddiv(out, dividend, divisor);
3551 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003552 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003553 __ ModR6(out, dividend, divisor);
3554 else
3555 __ Dmod(out, dividend, divisor);
3556 }
3557 }
3558}
3559
// Chooses operand/result locations for HDiv based on the result type.
void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      // Integral division can fold a constant divisor into the code.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      // FP division works entirely in FPU registers.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3582
3583void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003584 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003585 LocationSummary* locations = instruction->GetLocations();
3586
3587 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003588 case DataType::Type::kInt32:
3589 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07003590 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003591 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003592 case DataType::Type::kFloat32:
3593 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003594 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3595 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3596 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003597 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07003598 __ DivS(dst, lhs, rhs);
3599 else
3600 __ DivD(dst, lhs, rhs);
3601 break;
3602 }
3603 default:
3604 LOG(FATAL) << "Unexpected div type " << type;
3605 }
3606}
3607
3608void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003609 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003610 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003611}
3612
3613void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3614 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003615 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003616 codegen_->AddSlowPath(slow_path);
3617 Location value = instruction->GetLocations()->InAt(0);
3618
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003619 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003620
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003621 if (!DataType::IsIntegralType(type)) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003622 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003623 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003624 }
3625
3626 if (value.IsConstant()) {
3627 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
3628 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003629 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003630 } else {
3631 // A division by a non-null constant is valid. We don't need to perform
3632 // any check, so simply fall through.
3633 }
3634 } else {
3635 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
3636 }
3637}
3638
3639void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
3640 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003641 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003642 locations->SetOut(Location::ConstantLocation(constant));
3643}
3644
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3648
// HExit consumes no values and produces none, so it needs no locations.
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
3652
// The exit block emits no instructions of its own.
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
3655
3656void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
3657 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003658 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003659 locations->SetOut(Location::ConstantLocation(constant));
3660}
3661
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3665
// Shared implementation for HGoto and HTryBoundary: falls through when the
// successor is the next block in emission order, otherwise emits an
// unconditional branch. Loop back edges get a suspend check (optionally
// preceded by hotness counting) instead of a plain branch.
void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    // A jump straight to the exit block is only reachable after an
    // always-throwing instruction, so no branch is needed.
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
      // Increment the 16-bit hotness counter of the current ArtMethod,
      // loaded from its slot in the current stack frame.
      __ Ld(AT, SP, kCurrentMethodStackOffset);
      __ Lhu(TMP, AT, ArtMethod::HotnessCountOffset().Int32Value());
      __ Addiu(TMP, TMP, 1);
      __ Sh(TMP, AT, ArtMethod::HotnessCountOffset().Int32Value());
    }
    // NOTE(review): the early return implies GenerateSuspendCheck also
    // transfers control to `successor` — confirm against its definition.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Branch only when the successor is not the fall-through block.
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ Bc(codegen_->GetLabelOf(successor));
  }
}
3693
// An unconditional goto consumes and produces no values.
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
3697
// Delegates to HandleGoto, which also handles suspend checks and
// fall-through elision.
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
3701
// A try boundary carries no operands or results.
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
3705
3706void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3707 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3708 if (!successor->IsExitBlock()) {
3709 HandleGoto(try_boundary, successor);
3710 }
3711}
3712
Alexey Frunze299a9392015-12-08 16:08:02 -08003713void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
3714 bool is64bit,
3715 LocationSummary* locations) {
3716 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3717 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3718 Location rhs_location = locations->InAt(1);
3719 GpuRegister rhs_reg = ZERO;
3720 int64_t rhs_imm = 0;
3721 bool use_imm = rhs_location.IsConstant();
3722 if (use_imm) {
3723 if (is64bit) {
3724 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3725 } else {
3726 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3727 }
3728 } else {
3729 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3730 }
3731 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
3732
3733 switch (cond) {
3734 case kCondEQ:
3735 case kCondNE:
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01003736 if (use_imm && IsInt<16>(-rhs_imm)) {
3737 if (rhs_imm == 0) {
3738 if (cond == kCondEQ) {
3739 __ Sltiu(dst, lhs, 1);
3740 } else {
3741 __ Sltu(dst, ZERO, lhs);
3742 }
3743 } else {
3744 if (is64bit) {
3745 __ Daddiu(dst, lhs, -rhs_imm);
3746 } else {
3747 __ Addiu(dst, lhs, -rhs_imm);
3748 }
3749 if (cond == kCondEQ) {
3750 __ Sltiu(dst, dst, 1);
3751 } else {
3752 __ Sltu(dst, ZERO, dst);
3753 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003754 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003755 } else {
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01003756 if (use_imm && IsUint<16>(rhs_imm)) {
3757 __ Xori(dst, lhs, rhs_imm);
3758 } else {
3759 if (use_imm) {
3760 rhs_reg = TMP;
3761 __ LoadConst64(rhs_reg, rhs_imm);
3762 }
3763 __ Xor(dst, lhs, rhs_reg);
3764 }
3765 if (cond == kCondEQ) {
3766 __ Sltiu(dst, dst, 1);
3767 } else {
3768 __ Sltu(dst, ZERO, dst);
3769 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003770 }
3771 break;
3772
3773 case kCondLT:
3774 case kCondGE:
3775 if (use_imm && IsInt<16>(rhs_imm)) {
3776 __ Slti(dst, lhs, rhs_imm);
3777 } else {
3778 if (use_imm) {
3779 rhs_reg = TMP;
3780 __ LoadConst64(rhs_reg, rhs_imm);
3781 }
3782 __ Slt(dst, lhs, rhs_reg);
3783 }
3784 if (cond == kCondGE) {
3785 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3786 // only the slt instruction but no sge.
3787 __ Xori(dst, dst, 1);
3788 }
3789 break;
3790
3791 case kCondLE:
3792 case kCondGT:
3793 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
3794 // Simulate lhs <= rhs via lhs < rhs + 1.
3795 __ Slti(dst, lhs, rhs_imm_plus_one);
3796 if (cond == kCondGT) {
3797 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3798 // only the slti instruction but no sgti.
3799 __ Xori(dst, dst, 1);
3800 }
3801 } else {
3802 if (use_imm) {
3803 rhs_reg = TMP;
3804 __ LoadConst64(rhs_reg, rhs_imm);
3805 }
3806 __ Slt(dst, rhs_reg, lhs);
3807 if (cond == kCondLE) {
3808 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3809 // only the slt instruction but no sle.
3810 __ Xori(dst, dst, 1);
3811 }
3812 }
3813 break;
3814
3815 case kCondB:
3816 case kCondAE:
3817 if (use_imm && IsInt<16>(rhs_imm)) {
3818 // Sltiu sign-extends its 16-bit immediate operand before
3819 // the comparison and thus lets us compare directly with
3820 // unsigned values in the ranges [0, 0x7fff] and
3821 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3822 __ Sltiu(dst, lhs, rhs_imm);
3823 } else {
3824 if (use_imm) {
3825 rhs_reg = TMP;
3826 __ LoadConst64(rhs_reg, rhs_imm);
3827 }
3828 __ Sltu(dst, lhs, rhs_reg);
3829 }
3830 if (cond == kCondAE) {
3831 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3832 // only the sltu instruction but no sgeu.
3833 __ Xori(dst, dst, 1);
3834 }
3835 break;
3836
3837 case kCondBE:
3838 case kCondA:
3839 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
3840 // Simulate lhs <= rhs via lhs < rhs + 1.
3841 // Note that this only works if rhs + 1 does not overflow
3842 // to 0, hence the check above.
3843 // Sltiu sign-extends its 16-bit immediate operand before
3844 // the comparison and thus lets us compare directly with
3845 // unsigned values in the ranges [0, 0x7fff] and
3846 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3847 __ Sltiu(dst, lhs, rhs_imm_plus_one);
3848 if (cond == kCondA) {
3849 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3850 // only the sltiu instruction but no sgtiu.
3851 __ Xori(dst, dst, 1);
3852 }
3853 } else {
3854 if (use_imm) {
3855 rhs_reg = TMP;
3856 __ LoadConst64(rhs_reg, rhs_imm);
3857 }
3858 __ Sltu(dst, rhs_reg, lhs);
3859 if (cond == kCondBE) {
3860 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3861 // only the sltu instruction but no sleu.
3862 __ Xori(dst, dst, 1);
3863 }
3864 }
3865 break;
3866 }
3867}
3868
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02003869bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
3870 bool is64bit,
3871 LocationSummary* input_locations,
3872 GpuRegister dst) {
3873 GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
3874 Location rhs_location = input_locations->InAt(1);
3875 GpuRegister rhs_reg = ZERO;
3876 int64_t rhs_imm = 0;
3877 bool use_imm = rhs_location.IsConstant();
3878 if (use_imm) {
3879 if (is64bit) {
3880 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3881 } else {
3882 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3883 }
3884 } else {
3885 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3886 }
3887 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
3888
3889 switch (cond) {
3890 case kCondEQ:
3891 case kCondNE:
3892 if (use_imm && IsInt<16>(-rhs_imm)) {
3893 if (is64bit) {
3894 __ Daddiu(dst, lhs, -rhs_imm);
3895 } else {
3896 __ Addiu(dst, lhs, -rhs_imm);
3897 }
3898 } else if (use_imm && IsUint<16>(rhs_imm)) {
3899 __ Xori(dst, lhs, rhs_imm);
3900 } else {
3901 if (use_imm) {
3902 rhs_reg = TMP;
3903 __ LoadConst64(rhs_reg, rhs_imm);
3904 }
3905 __ Xor(dst, lhs, rhs_reg);
3906 }
3907 return (cond == kCondEQ);
3908
3909 case kCondLT:
3910 case kCondGE:
3911 if (use_imm && IsInt<16>(rhs_imm)) {
3912 __ Slti(dst, lhs, rhs_imm);
3913 } else {
3914 if (use_imm) {
3915 rhs_reg = TMP;
3916 __ LoadConst64(rhs_reg, rhs_imm);
3917 }
3918 __ Slt(dst, lhs, rhs_reg);
3919 }
3920 return (cond == kCondGE);
3921
3922 case kCondLE:
3923 case kCondGT:
3924 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
3925 // Simulate lhs <= rhs via lhs < rhs + 1.
3926 __ Slti(dst, lhs, rhs_imm_plus_one);
3927 return (cond == kCondGT);
3928 } else {
3929 if (use_imm) {
3930 rhs_reg = TMP;
3931 __ LoadConst64(rhs_reg, rhs_imm);
3932 }
3933 __ Slt(dst, rhs_reg, lhs);
3934 return (cond == kCondLE);
3935 }
3936
3937 case kCondB:
3938 case kCondAE:
3939 if (use_imm && IsInt<16>(rhs_imm)) {
3940 // Sltiu sign-extends its 16-bit immediate operand before
3941 // the comparison and thus lets us compare directly with
3942 // unsigned values in the ranges [0, 0x7fff] and
3943 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3944 __ Sltiu(dst, lhs, rhs_imm);
3945 } else {
3946 if (use_imm) {
3947 rhs_reg = TMP;
3948 __ LoadConst64(rhs_reg, rhs_imm);
3949 }
3950 __ Sltu(dst, lhs, rhs_reg);
3951 }
3952 return (cond == kCondAE);
3953
3954 case kCondBE:
3955 case kCondA:
3956 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
3957 // Simulate lhs <= rhs via lhs < rhs + 1.
3958 // Note that this only works if rhs + 1 does not overflow
3959 // to 0, hence the check above.
3960 // Sltiu sign-extends its 16-bit immediate operand before
3961 // the comparison and thus lets us compare directly with
3962 // unsigned values in the ranges [0, 0x7fff] and
3963 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3964 __ Sltiu(dst, lhs, rhs_imm_plus_one);
3965 return (cond == kCondA);
3966 } else {
3967 if (use_imm) {
3968 rhs_reg = TMP;
3969 __ LoadConst64(rhs_reg, rhs_imm);
3970 }
3971 __ Sltu(dst, rhs_reg, lhs);
3972 return (cond == kCondBE);
3973 }
3974 }
3975}
3976
Alexey Frunze299a9392015-12-08 16:08:02 -08003977void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
3978 bool is64bit,
3979 LocationSummary* locations,
3980 Mips64Label* label) {
3981 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3982 Location rhs_location = locations->InAt(1);
3983 GpuRegister rhs_reg = ZERO;
3984 int64_t rhs_imm = 0;
3985 bool use_imm = rhs_location.IsConstant();
3986 if (use_imm) {
3987 if (is64bit) {
3988 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3989 } else {
3990 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3991 }
3992 } else {
3993 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3994 }
3995
3996 if (use_imm && rhs_imm == 0) {
3997 switch (cond) {
3998 case kCondEQ:
3999 case kCondBE: // <= 0 if zero
4000 __ Beqzc(lhs, label);
4001 break;
4002 case kCondNE:
4003 case kCondA: // > 0 if non-zero
4004 __ Bnezc(lhs, label);
4005 break;
4006 case kCondLT:
4007 __ Bltzc(lhs, label);
4008 break;
4009 case kCondGE:
4010 __ Bgezc(lhs, label);
4011 break;
4012 case kCondLE:
4013 __ Blezc(lhs, label);
4014 break;
4015 case kCondGT:
4016 __ Bgtzc(lhs, label);
4017 break;
4018 case kCondB: // always false
4019 break;
4020 case kCondAE: // always true
4021 __ Bc(label);
4022 break;
4023 }
4024 } else {
4025 if (use_imm) {
4026 rhs_reg = TMP;
4027 __ LoadConst64(rhs_reg, rhs_imm);
4028 }
4029 switch (cond) {
4030 case kCondEQ:
4031 __ Beqc(lhs, rhs_reg, label);
4032 break;
4033 case kCondNE:
4034 __ Bnec(lhs, rhs_reg, label);
4035 break;
4036 case kCondLT:
4037 __ Bltc(lhs, rhs_reg, label);
4038 break;
4039 case kCondGE:
4040 __ Bgec(lhs, rhs_reg, label);
4041 break;
4042 case kCondLE:
4043 __ Bgec(rhs_reg, lhs, label);
4044 break;
4045 case kCondGT:
4046 __ Bltc(rhs_reg, lhs, label);
4047 break;
4048 case kCondB:
4049 __ Bltuc(lhs, rhs_reg, label);
4050 break;
4051 case kCondAE:
4052 __ Bgeuc(lhs, rhs_reg, label);
4053 break;
4054 case kCondBE:
4055 __ Bgeuc(rhs_reg, lhs, label);
4056 break;
4057 case kCondA:
4058 __ Bltuc(rhs_reg, lhs, label);
4059 break;
4060 }
4061 }
4062}
4063
Tijana Jakovljevic43758192016-12-30 09:23:01 +01004064void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
4065 bool gt_bias,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004066 DataType::Type type,
Tijana Jakovljevic43758192016-12-30 09:23:01 +01004067 LocationSummary* locations) {
4068 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
4069 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
4070 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004071 if (type == DataType::Type::kFloat32) {
Tijana Jakovljevic43758192016-12-30 09:23:01 +01004072 switch (cond) {
4073 case kCondEQ:
4074 __ CmpEqS(FTMP, lhs, rhs);
4075 __ Mfc1(dst, FTMP);
4076 __ Andi(dst, dst, 1);
4077 break;
4078 case kCondNE:
4079 __ CmpEqS(FTMP, lhs, rhs);
4080 __ Mfc1(dst, FTMP);
4081 __ Addiu(dst, dst, 1);
4082 break;
4083 case kCondLT:
4084 if (gt_bias) {
4085 __ CmpLtS(FTMP, lhs, rhs);
4086 } else {
4087 __ CmpUltS(FTMP, lhs, rhs);
4088 }
4089 __ Mfc1(dst, FTMP);
4090 __ Andi(dst, dst, 1);
4091 break;
4092 case kCondLE:
4093 if (gt_bias) {
4094 __ CmpLeS(FTMP, lhs, rhs);
4095 } else {
4096 __ CmpUleS(FTMP, lhs, rhs);
4097 }
4098 __ Mfc1(dst, FTMP);
4099 __ Andi(dst, dst, 1);
4100 break;
4101 case kCondGT:
4102 if (gt_bias) {
4103 __ CmpUltS(FTMP, rhs, lhs);
4104 } else {
4105 __ CmpLtS(FTMP, rhs, lhs);
4106 }
4107 __ Mfc1(dst, FTMP);
4108 __ Andi(dst, dst, 1);
4109 break;
4110 case kCondGE:
4111 if (gt_bias) {
4112 __ CmpUleS(FTMP, rhs, lhs);
4113 } else {
4114 __ CmpLeS(FTMP, rhs, lhs);
4115 }
4116 __ Mfc1(dst, FTMP);
4117 __ Andi(dst, dst, 1);
4118 break;
4119 default:
4120 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4121 UNREACHABLE();
4122 }
4123 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004124 DCHECK_EQ(type, DataType::Type::kFloat64);
Tijana Jakovljevic43758192016-12-30 09:23:01 +01004125 switch (cond) {
4126 case kCondEQ:
4127 __ CmpEqD(FTMP, lhs, rhs);
4128 __ Mfc1(dst, FTMP);
4129 __ Andi(dst, dst, 1);
4130 break;
4131 case kCondNE:
4132 __ CmpEqD(FTMP, lhs, rhs);
4133 __ Mfc1(dst, FTMP);
4134 __ Addiu(dst, dst, 1);
4135 break;
4136 case kCondLT:
4137 if (gt_bias) {
4138 __ CmpLtD(FTMP, lhs, rhs);
4139 } else {
4140 __ CmpUltD(FTMP, lhs, rhs);
4141 }
4142 __ Mfc1(dst, FTMP);
4143 __ Andi(dst, dst, 1);
4144 break;
4145 case kCondLE:
4146 if (gt_bias) {
4147 __ CmpLeD(FTMP, lhs, rhs);
4148 } else {
4149 __ CmpUleD(FTMP, lhs, rhs);
4150 }
4151 __ Mfc1(dst, FTMP);
4152 __ Andi(dst, dst, 1);
4153 break;
4154 case kCondGT:
4155 if (gt_bias) {
4156 __ CmpUltD(FTMP, rhs, lhs);
4157 } else {
4158 __ CmpLtD(FTMP, rhs, lhs);
4159 }
4160 __ Mfc1(dst, FTMP);
4161 __ Andi(dst, dst, 1);
4162 break;
4163 case kCondGE:
4164 if (gt_bias) {
4165 __ CmpUleD(FTMP, rhs, lhs);
4166 } else {
4167 __ CmpLeD(FTMP, rhs, lhs);
4168 }
4169 __ Mfc1(dst, FTMP);
4170 __ Andi(dst, dst, 1);
4171 break;
4172 default:
4173 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4174 UNREACHABLE();
4175 }
4176 }
4177}
4178
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004179bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
4180 bool gt_bias,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004181 DataType::Type type,
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004182 LocationSummary* input_locations,
4183 FpuRegister dst) {
4184 FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
4185 FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004186 if (type == DataType::Type::kFloat32) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004187 switch (cond) {
4188 case kCondEQ:
4189 __ CmpEqS(dst, lhs, rhs);
4190 return false;
4191 case kCondNE:
4192 __ CmpEqS(dst, lhs, rhs);
4193 return true;
4194 case kCondLT:
4195 if (gt_bias) {
4196 __ CmpLtS(dst, lhs, rhs);
4197 } else {
4198 __ CmpUltS(dst, lhs, rhs);
4199 }
4200 return false;
4201 case kCondLE:
4202 if (gt_bias) {
4203 __ CmpLeS(dst, lhs, rhs);
4204 } else {
4205 __ CmpUleS(dst, lhs, rhs);
4206 }
4207 return false;
4208 case kCondGT:
4209 if (gt_bias) {
4210 __ CmpUltS(dst, rhs, lhs);
4211 } else {
4212 __ CmpLtS(dst, rhs, lhs);
4213 }
4214 return false;
4215 case kCondGE:
4216 if (gt_bias) {
4217 __ CmpUleS(dst, rhs, lhs);
4218 } else {
4219 __ CmpLeS(dst, rhs, lhs);
4220 }
4221 return false;
4222 default:
4223 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4224 UNREACHABLE();
4225 }
4226 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004227 DCHECK_EQ(type, DataType::Type::kFloat64);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004228 switch (cond) {
4229 case kCondEQ:
4230 __ CmpEqD(dst, lhs, rhs);
4231 return false;
4232 case kCondNE:
4233 __ CmpEqD(dst, lhs, rhs);
4234 return true;
4235 case kCondLT:
4236 if (gt_bias) {
4237 __ CmpLtD(dst, lhs, rhs);
4238 } else {
4239 __ CmpUltD(dst, lhs, rhs);
4240 }
4241 return false;
4242 case kCondLE:
4243 if (gt_bias) {
4244 __ CmpLeD(dst, lhs, rhs);
4245 } else {
4246 __ CmpUleD(dst, lhs, rhs);
4247 }
4248 return false;
4249 case kCondGT:
4250 if (gt_bias) {
4251 __ CmpUltD(dst, rhs, lhs);
4252 } else {
4253 __ CmpLtD(dst, rhs, lhs);
4254 }
4255 return false;
4256 case kCondGE:
4257 if (gt_bias) {
4258 __ CmpUleD(dst, rhs, lhs);
4259 } else {
4260 __ CmpLeD(dst, rhs, lhs);
4261 }
4262 return false;
4263 default:
4264 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4265 UNREACHABLE();
4266 }
4267 }
4268}
4269
Alexey Frunze299a9392015-12-08 16:08:02 -08004270void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
4271 bool gt_bias,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004272 DataType::Type type,
Alexey Frunze299a9392015-12-08 16:08:02 -08004273 LocationSummary* locations,
4274 Mips64Label* label) {
4275 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
4276 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004277 if (type == DataType::Type::kFloat32) {
Alexey Frunze299a9392015-12-08 16:08:02 -08004278 switch (cond) {
4279 case kCondEQ:
4280 __ CmpEqS(FTMP, lhs, rhs);
4281 __ Bc1nez(FTMP, label);
4282 break;
4283 case kCondNE:
4284 __ CmpEqS(FTMP, lhs, rhs);
4285 __ Bc1eqz(FTMP, label);
4286 break;
4287 case kCondLT:
4288 if (gt_bias) {
4289 __ CmpLtS(FTMP, lhs, rhs);
4290 } else {
4291 __ CmpUltS(FTMP, lhs, rhs);
4292 }
4293 __ Bc1nez(FTMP, label);
4294 break;
4295 case kCondLE:
4296 if (gt_bias) {
4297 __ CmpLeS(FTMP, lhs, rhs);
4298 } else {
4299 __ CmpUleS(FTMP, lhs, rhs);
4300 }
4301 __ Bc1nez(FTMP, label);
4302 break;
4303 case kCondGT:
4304 if (gt_bias) {
4305 __ CmpUltS(FTMP, rhs, lhs);
4306 } else {
4307 __ CmpLtS(FTMP, rhs, lhs);
4308 }
4309 __ Bc1nez(FTMP, label);
4310 break;
4311 case kCondGE:
4312 if (gt_bias) {
4313 __ CmpUleS(FTMP, rhs, lhs);
4314 } else {
4315 __ CmpLeS(FTMP, rhs, lhs);
4316 }
4317 __ Bc1nez(FTMP, label);
4318 break;
4319 default:
4320 LOG(FATAL) << "Unexpected non-floating-point condition";
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004321 UNREACHABLE();
Alexey Frunze299a9392015-12-08 16:08:02 -08004322 }
4323 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004324 DCHECK_EQ(type, DataType::Type::kFloat64);
Alexey Frunze299a9392015-12-08 16:08:02 -08004325 switch (cond) {
4326 case kCondEQ:
4327 __ CmpEqD(FTMP, lhs, rhs);
4328 __ Bc1nez(FTMP, label);
4329 break;
4330 case kCondNE:
4331 __ CmpEqD(FTMP, lhs, rhs);
4332 __ Bc1eqz(FTMP, label);
4333 break;
4334 case kCondLT:
4335 if (gt_bias) {
4336 __ CmpLtD(FTMP, lhs, rhs);
4337 } else {
4338 __ CmpUltD(FTMP, lhs, rhs);
4339 }
4340 __ Bc1nez(FTMP, label);
4341 break;
4342 case kCondLE:
4343 if (gt_bias) {
4344 __ CmpLeD(FTMP, lhs, rhs);
4345 } else {
4346 __ CmpUleD(FTMP, lhs, rhs);
4347 }
4348 __ Bc1nez(FTMP, label);
4349 break;
4350 case kCondGT:
4351 if (gt_bias) {
4352 __ CmpUltD(FTMP, rhs, lhs);
4353 } else {
4354 __ CmpLtD(FTMP, rhs, lhs);
4355 }
4356 __ Bc1nez(FTMP, label);
4357 break;
4358 case kCondGE:
4359 if (gt_bias) {
4360 __ CmpUleD(FTMP, rhs, lhs);
4361 } else {
4362 __ CmpLeD(FTMP, rhs, lhs);
4363 }
4364 __ Bc1nez(FTMP, label);
4365 break;
4366 default:
4367 LOG(FATAL) << "Unexpected non-floating-point condition";
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004368 UNREACHABLE();
Alexey Frunze299a9392015-12-08 16:08:02 -08004369 }
4370 }
4371}
4372
Alexey Frunze4dda3372015-06-01 18:31:49 -07004373void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00004374 size_t condition_input_index,
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004375 Mips64Label* true_target,
4376 Mips64Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00004377 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004378
David Brazdil0debae72015-11-12 18:37:00 +00004379 if (true_target == nullptr && false_target == nullptr) {
4380 // Nothing to do. The code always falls through.
4381 return;
4382 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00004383 // Constant condition, statically compared against "true" (integer value 1).
4384 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00004385 if (true_target != nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004386 __ Bc(true_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004387 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004388 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00004389 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00004390 if (false_target != nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004391 __ Bc(false_target);
David Brazdil0debae72015-11-12 18:37:00 +00004392 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004393 }
David Brazdil0debae72015-11-12 18:37:00 +00004394 return;
4395 }
4396
4397 // The following code generates these patterns:
4398 // (1) true_target == nullptr && false_target != nullptr
4399 // - opposite condition true => branch to false_target
4400 // (2) true_target != nullptr && false_target == nullptr
4401 // - condition true => branch to true_target
4402 // (3) true_target != nullptr && false_target != nullptr
4403 // - condition true => branch to true_target
4404 // - branch to false_target
4405 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004406 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00004407 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004408 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00004409 if (true_target == nullptr) {
4410 __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
4411 } else {
4412 __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
4413 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004414 } else {
4415 // The condition instruction has not been materialized, use its inputs as
4416 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00004417 HCondition* condition = cond->AsCondition();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004418 DataType::Type type = condition->InputAt(0)->GetType();
Alexey Frunze299a9392015-12-08 16:08:02 -08004419 LocationSummary* locations = cond->GetLocations();
4420 IfCondition if_cond = condition->GetCondition();
4421 Mips64Label* branch_target = true_target;
David Brazdil0debae72015-11-12 18:37:00 +00004422
David Brazdil0debae72015-11-12 18:37:00 +00004423 if (true_target == nullptr) {
4424 if_cond = condition->GetOppositeCondition();
Alexey Frunze299a9392015-12-08 16:08:02 -08004425 branch_target = false_target;
David Brazdil0debae72015-11-12 18:37:00 +00004426 }
4427
Alexey Frunze299a9392015-12-08 16:08:02 -08004428 switch (type) {
4429 default:
4430 GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
4431 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004432 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08004433 GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
4434 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004435 case DataType::Type::kFloat32:
4436 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08004437 GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
4438 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07004439 }
4440 }
David Brazdil0debae72015-11-12 18:37:00 +00004441
4442 // If neither branch falls through (case 3), the conditional branch to `true_target`
4443 // was already emitted (case 2) and we need to emit a jump to `false_target`.
4444 if (true_target != nullptr && false_target != nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004445 __ Bc(false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004446 }
4447}
4448
4449void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004450 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00004451 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004452 locations->SetInAt(0, Location::RequiresRegister());
4453 }
4454}
4455
4456void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00004457 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
4458 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004459 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004460 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004461 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004462 nullptr : codegen_->GetLabelOf(false_successor);
4463 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004464}
4465
4466void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004467 LocationSummary* locations = new (GetGraph()->GetAllocator())
Alexey Frunze4dda3372015-06-01 18:31:49 -07004468 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01004469 InvokeRuntimeCallingConvention calling_convention;
4470 RegisterSet caller_saves = RegisterSet::Empty();
4471 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4472 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00004473 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004474 locations->SetInAt(0, Location::RequiresRegister());
4475 }
4476}
4477
4478void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08004479 SlowPathCodeMIPS64* slow_path =
4480 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00004481 GenerateTestAndBranch(deoptimize,
4482 /* condition_input_index */ 0,
4483 slow_path->GetEntryLabel(),
4484 /* false_target */ nullptr);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004485}
4486
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004487// This function returns true if a conditional move can be generated for HSelect.
4488// Otherwise it returns false and HSelect must be implemented in terms of conditonal
4489// branches and regular moves.
4490//
4491// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
4492//
4493// While determining feasibility of a conditional move and setting inputs/outputs
4494// are two distinct tasks, this function does both because they share quite a bit
4495// of common logic.
4496static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
4497 bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
4498 HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
4499 HCondition* condition = cond->AsCondition();
4500
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004501 DataType::Type cond_type =
4502 materialized ? DataType::Type::kInt32 : condition->InputAt(0)->GetType();
4503 DataType::Type dst_type = select->GetType();
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004504
4505 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
4506 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
4507 bool is_true_value_zero_constant =
4508 (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
4509 bool is_false_value_zero_constant =
4510 (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());
4511
4512 bool can_move_conditionally = false;
4513 bool use_const_for_false_in = false;
4514 bool use_const_for_true_in = false;
4515
4516 if (!cond->IsConstant()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004517 if (!DataType::IsFloatingPointType(cond_type)) {
4518 if (!DataType::IsFloatingPointType(dst_type)) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004519 // Moving int/long on int/long condition.
4520 if (is_true_value_zero_constant) {
4521 // seleqz out_reg, false_reg, cond_reg
4522 can_move_conditionally = true;
4523 use_const_for_true_in = true;
4524 } else if (is_false_value_zero_constant) {
4525 // selnez out_reg, true_reg, cond_reg
4526 can_move_conditionally = true;
4527 use_const_for_false_in = true;
4528 } else if (materialized) {
4529 // Not materializing unmaterialized int conditions
4530 // to keep the instruction count low.
4531 // selnez AT, true_reg, cond_reg
4532 // seleqz TMP, false_reg, cond_reg
4533 // or out_reg, AT, TMP
4534 can_move_conditionally = true;
4535 }
4536 } else {
4537 // Moving float/double on int/long condition.
4538 if (materialized) {
4539 // Not materializing unmaterialized int conditions
4540 // to keep the instruction count low.
4541 can_move_conditionally = true;
4542 if (is_true_value_zero_constant) {
4543 // sltu TMP, ZERO, cond_reg
4544 // mtc1 TMP, temp_cond_reg
4545 // seleqz.fmt out_reg, false_reg, temp_cond_reg
4546 use_const_for_true_in = true;
4547 } else if (is_false_value_zero_constant) {
4548 // sltu TMP, ZERO, cond_reg
4549 // mtc1 TMP, temp_cond_reg
4550 // selnez.fmt out_reg, true_reg, temp_cond_reg
4551 use_const_for_false_in = true;
4552 } else {
4553 // sltu TMP, ZERO, cond_reg
4554 // mtc1 TMP, temp_cond_reg
4555 // sel.fmt temp_cond_reg, false_reg, true_reg
4556 // mov.fmt out_reg, temp_cond_reg
4557 }
4558 }
4559 }
4560 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004561 if (!DataType::IsFloatingPointType(dst_type)) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004562 // Moving int/long on float/double condition.
4563 can_move_conditionally = true;
4564 if (is_true_value_zero_constant) {
4565 // mfc1 TMP, temp_cond_reg
4566 // seleqz out_reg, false_reg, TMP
4567 use_const_for_true_in = true;
4568 } else if (is_false_value_zero_constant) {
4569 // mfc1 TMP, temp_cond_reg
4570 // selnez out_reg, true_reg, TMP
4571 use_const_for_false_in = true;
4572 } else {
4573 // mfc1 TMP, temp_cond_reg
4574 // selnez AT, true_reg, TMP
4575 // seleqz TMP, false_reg, TMP
4576 // or out_reg, AT, TMP
4577 }
4578 } else {
4579 // Moving float/double on float/double condition.
4580 can_move_conditionally = true;
4581 if (is_true_value_zero_constant) {
4582 // seleqz.fmt out_reg, false_reg, temp_cond_reg
4583 use_const_for_true_in = true;
4584 } else if (is_false_value_zero_constant) {
4585 // selnez.fmt out_reg, true_reg, temp_cond_reg
4586 use_const_for_false_in = true;
4587 } else {
4588 // sel.fmt temp_cond_reg, false_reg, true_reg
4589 // mov.fmt out_reg, temp_cond_reg
4590 }
4591 }
4592 }
4593 }
4594
4595 if (can_move_conditionally) {
4596 DCHECK(!use_const_for_false_in || !use_const_for_true_in);
4597 } else {
4598 DCHECK(!use_const_for_false_in);
4599 DCHECK(!use_const_for_true_in);
4600 }
4601
4602 if (locations_to_set != nullptr) {
4603 if (use_const_for_false_in) {
4604 locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
4605 } else {
4606 locations_to_set->SetInAt(0,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004607 DataType::IsFloatingPointType(dst_type)
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004608 ? Location::RequiresFpuRegister()
4609 : Location::RequiresRegister());
4610 }
4611 if (use_const_for_true_in) {
4612 locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
4613 } else {
4614 locations_to_set->SetInAt(1,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004615 DataType::IsFloatingPointType(dst_type)
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004616 ? Location::RequiresFpuRegister()
4617 : Location::RequiresRegister());
4618 }
4619 if (materialized) {
4620 locations_to_set->SetInAt(2, Location::RequiresRegister());
4621 }
4622
4623 if (can_move_conditionally) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004624 locations_to_set->SetOut(DataType::IsFloatingPointType(dst_type)
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004625 ? Location::RequiresFpuRegister()
4626 : Location::RequiresRegister());
4627 } else {
4628 locations_to_set->SetOut(Location::SameAsFirstInput());
4629 }
4630 }
4631
4632 return can_move_conditionally;
4633}
4634
4635
// Emits a branchless conditional move for an HSelect whose locations were set
// up by CanMoveConditionally(). The condition is either a materialized boolean
// (input 2, already in a register) or an unmaterialized HCondition, which is
// materialized here into cond_reg (TMP) for int/long compares or fcond_reg
// (FTMP) for float/double compares. The select itself is emitted with the
// seleqz/selnez (integer) or sel.fmt/seleqz.fmt/selnez.fmt (FP) instructions.
void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  GpuRegister cond_reg = TMP;
  FpuRegister fcond_reg = FTMP;
  DataType::Type cond_type = DataType::Type::kInt32;
  bool cond_inverted = false;
  DataType::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition is already a 0/1 value in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<GpuRegister>();
  } else {
    // Materialize the condition into cond_reg/fcond_reg. The Materialize*
    // helpers may emit the inverse test when cheaper; cond_inverted records
    // that so the select operands can be swapped below.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kInt64:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  // Constant inputs must be zero bit patterns: seleqz/selnez produce zero for
  // the not-selected case, so the constant operand is realized "for free".
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer destination.
      if (DataType::IsFloatingPointType(cond_type)) {
        // Move the FP condition bit into the integer condition register.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        // True input is zero: select false input or zero.
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False input is zero: select true input or zero.
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
        // Both inputs in registers: zero out the not-taken side in AT/TMP
        // and OR the two halves together.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case DataType::Type::kFloat32: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // sel.fmt overwrites its condition operand, so select into fcond_reg
        // and then move the result into the destination.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // See the kFloat32 case: select into fcond_reg, then move to dst.
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
4787
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004788void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004789 LocationSummary* locations = new (GetGraph()->GetAllocator())
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004790 LocationSummary(flag, LocationSummary::kNoCall);
4791 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004792}
4793
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004794void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4795 __ LoadFromOffset(kLoadWord,
4796 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
4797 SP,
4798 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07004799}
4800
David Brazdil74eb1b22015-12-14 11:44:01 +00004801void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004802 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004803 CanMoveConditionally(select, locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00004804}
4805
4806void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004807 if (CanMoveConditionally(select, /* locations_to_set */ nullptr)) {
4808 GenConditionalMove(select);
4809 } else {
4810 LocationSummary* locations = select->GetLocations();
4811 Mips64Label false_target;
4812 GenerateTestAndBranch(select,
4813 /* condition_input_index */ 2,
4814 /* true_target */ nullptr,
4815 &false_target);
4816 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
4817 __ Bind(&false_target);
4818 }
David Brazdil74eb1b22015-12-14 11:44:01 +00004819}
4820
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No inputs, outputs or temps; an empty summary is enough.
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
4824
// Intentionally emits nothing; see the comment below.
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
4828
// Emits a single nop instruction (used e.g. as a debuggable placeholder).
void CodeGeneratorMIPS64::GenerateNop() {
  __ Nop();
}
4832
// Sets up locations for an instance/static field get. Reference loads under a
// read barrier may need a slow-path call and, without Baker thunks, a temp.
void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DataType::Type field_type = field_info.GetFieldType();
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == DataType::Type::kReference);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      object_field_get_with_read_barrier
          ? LocationSummary::kCallOnSlowPath
          : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps in the case of an object field get with
    // read barriers enabled: we do not want the move to overwrite the
    // object's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_field_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
    if (!kBakerReadBarrierThunksEnableForFields) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
4866
// Emits the load for an instance/static field get, including any read
// barrier for reference fields and load-acquire barriers for volatiles.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Pick the load width/signedness matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      load_type = kLoadUnsignedByte;
      break;
    case DataType::Type::kInt8:
      load_type = kLoadSignedByte;
      break;
    case DataType::Type::kUint16:
      load_type = kLoadUnsignedHalfword;
      break;
    case DataType::Type::kInt16:
      load_type = kLoadSignedHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
      load_type = kLoadWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      load_type = kLoadDoubleword;
      break;
    case DataType::Type::kReference:
      // Compressed 32-bit heap reference; zero-extend.
      load_type = kLoadUnsignedWord;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!DataType::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == DataType::Type::kReference) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // With thunks enabled no temp location was reserved (see
        // LocationsBuilderMIPS64::HandleFieldGet).
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != DataType::Type::kReference)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4955
// Sets up locations for an instance/static field set: the object in a core
// register, the value in an FPU/core register or as a zero-bit-pattern
// constant that can be stored directly.
void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info ATTRIBUTE_UNUSED) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
  } else {
    locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
  }
}
4967
// Emits the store for an instance/static field set, including volatile
// store barriers, heap-reference poisoning and the GC card mark.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DataType::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Pick the store width matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      store_type = kStoreByte;
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      store_type = kStoreHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
    case DataType::Type::kReference:
      store_type = kStoreWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      store_type = kStoreDoubleword;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Release-style barrier before a volatile store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!DataType::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, DataType::Type::kReference);
        // Poison into TMP so `src` keeps the unpoisoned value for the
        // card mark below.
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  // Full barrier after a volatile store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
5045
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegates to the shared HandleFieldGet location setup.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
5049
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegates to the shared HandleFieldGet code emission.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
5053
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Delegates to the shared HandleFieldSet location setup.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
5057
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Delegates to the shared HandleFieldSet code emission; null-ness of the
  // stored value controls whether the card mark needs a null filter.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
5061
// Loads a heap reference from *(out + offset) back into `out` itself,
// emitting the requested kind of read barrier. `maybe_temp` is required for
// the slow-path barrier (and for Baker's without field thunks).
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5100
// Loads a heap reference from *(obj + offset) into `out` (a distinct
// register, so no save/restore dance is needed), emitting the requested kind
// of read barrier.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5137
// Maps a markable GPU register to its 0-based index in the Baker mark thunk
// tables (20 entries total: V0..T2, S2..S7, S8). Fatal for any other register.
static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
  static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
  if (reg >= V0 && reg <= T2) {  // 13 consecutive regs.
    return reg - V0;
  } else if (reg >= S2 && reg <= S7) {  // 6 consecutive regs.
    return 13 + (reg - S2);
  } else if (reg == S8) {  // One more.
    return 19;
  }
  LOG(FATAL) << "Unexpected register " << reg;
  UNREACHABLE();
}
5150
// Returns the byte displacement of the field/array mark thunk for `reg`.
// The short-offset variant of each register's thunk lives one full register
// bank (BAKER_MARK_INTROSPECTION_REGISTER_COUNT entries) after its
// long-offset variant.
static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
  int num = GetBakerMarkThunkNumber(reg) +
      (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
  return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
}
5156
// Returns the byte displacement of the GC-root mark thunk for `reg`, relative
// to the start of the introspection entrypoint.
static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
  return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
      BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
}
5161
// Loads a GC root (compressed reference) from *(obj + offset) into `root`,
// emitting the read barrier dictated by `read_barrier_option`. When
// `label_low` is non-null it is bound exactly at the load instruction (the
// offset must then be the 0x5678 placeholder, presumably patched later —
// NOTE(review): confirm against the callers outside this chunk).
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        // temp = Thread::Current()->pReadBarrierMarkReg00
        //     // AKA &art_quick_read_barrier_mark_introspection.
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // if (temp != nullptr) {
        //   temp = &gc_root_thunk<root_reg>
        //   root = temp(root)
        // }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        // Offsets that do not fit the 16-bit immediate go through an adjusted
        // base register (TMP), materialized with daui below.
        GpuRegister base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
        if (!short_offset) {
          DCHECK(!label_low);
          __ Daui(base, obj, offset_high);
        }
        Mips64Label skip_call;
        // Bare branch: the reference load below sits in its delay/forbidden
        // slot and must execute on both paths.
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low);  // Single instruction
                                                                           // in delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        // if (temp != null) {
        //   root = temp(root)
        // }

        if (label_low != nullptr) {
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
        __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
5286
// Emits a field load of `ref = *(obj + offset)` guarded by a Baker read
// barrier. When introspection thunks are enabled, the barrier is a single
// conditional call into a per-holder-register thunk; otherwise it falls back
// to the generic GenerateReferenceLoadWithBakerReadBarrier() path.
//
// `temp` must be invalid in the thunk path (T9/TMP are used implicitly);
// `needs_null_check` requests recording of an implicit null check on the load.
void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // If the offset is too large to fit into the lw instruction, we
    //     // use an adjusted base register (TMP) here. This register
    //     // receives bits 16 ... 31 of the offset before the thunk invocation
    //     // and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    // Offsets that fit a 16-bit immediate can be loaded with a single lwu
    // without an adjusted base register.
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // There may have or may have not been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    Mips64Label skip_call;
    if (short_offset) {
      // Compact branch (no delay slot): pad the forbidden slot with a nop so
      // the thunk's return address lands on the reference load below.
      __ Beqzc(T9, &skip_call, /* is_bare */ true);
      __ Nop();  // In forbidden slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);  // Single instruction.
    } else {
      int16_t offset_low = Low16Bits(offset);
      int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lwu.
      // Delay-slot branch: the upper offset bits are added into TMP in the
      // delay slot so the thunk (and the load below) can use TMP as the base.
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Daui(TMP, obj, offset_high);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low);  // Single instruction.
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Generic (non-thunk) Baker read barrier path.
  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
5376
// Emits an array-element load `ref = data[index]` guarded by a Baker read
// barrier. With introspection thunks enabled, the element address is
// precomputed into TMP and a conditional thunk call performs the marking;
// otherwise the generic GenerateReferenceLoadWithBakerReadBarrier() is used.
void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // Heap references are 32-bit, hence the 4-byte element scale.
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    Mips64Label skip_call;
    __ Beqz(T9, &skip_call, /* is_bare */ true);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    GpuRegister index_reg = index.AsRegister<GpuRegister>();
    // TMP = obj + (index << scale) computed in the branch delay slot so it is
    // available both to the thunk and to the load below.
    __ Dlsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
    __ Jialc(T9, thunk_disp);
    __ Bind(&skip_call);
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Generic (non-thunk) Baker read barrier path.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
5452
// Generic (non-thunk) Baker read barrier reference load: loads the holder's
// lock word first, then the reference, and branches to a marking slow path
// when the holder's read barrier state bit says it is gray.
//
// `index` may be invalid (plain field load), a constant, or a register;
// `always_update_field` selects the mark-and-update slow path used by
// UnsafeCASObject-style intrinsics (then `offset` must be 0 and
// `scale_factor` TIMES_1, with the address being `obj + index`).
void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    // The monitor load doubles as the implicit null check on `obj`.
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      // Dlsa encodes shift amounts 1..4 only, so TIMES_1 uses a plain add.
      if (scale_factor == TIMES_1) {
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetScopedAllocator())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5558
// Emits an unconditional slow-path read barrier (non-Baker) for a heap
// reference that has already been loaded into `ref`; the runtime call is
// performed in ReadBarrierForHeapReferenceSlowPathMIPS64.
void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeMIPS64* slow_path = new (GetScopedAllocator())
      ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  // This barrier is not conditional: always take the slow path.
  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5585
5586void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5587 Location out,
5588 Location ref,
5589 Location obj,
5590 uint32_t offset,
5591 Location index) {
5592 if (kEmitCompilerReadBarrier) {
5593 // Baker's read barriers shall be handled by the fast path
5594 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5595 DCHECK(!kUseBakerReadBarrier);
5596 // If heap poisoning is enabled, unpoisoning will be taken care of
5597 // by the runtime within the slow path.
5598 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5599 } else if (kPoisonHeapReferences) {
5600 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5601 }
5602}
5603
// Emits an unconditional slow-path read barrier for a GC root that has
// already been loaded into `root` (used by read barrier configurations other
// than Baker's).
void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeMIPS64* slow_path =
      new (GetScopedAllocator()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
  AddSlowPath(slow_path);

  // This barrier is not conditional: always take the slow path.
  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5620
// Builds the location summary for HInstanceOf: chooses the call kind per
// type-check kind, reserves inputs (register operands, or constant bitstring
// inputs for kBitstringCheck) and the output/temporary registers.
void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      // These checks only need a slow path when their class loads require a
      // read barrier.
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // These always go through the type-check slow path on failure.
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      // Pure register/constant comparison; no call needed.
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // Bitstring checks compare against constant path/mask inputs.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  // The output does overlap inputs.
  // Note that TypeCheckSlowPathMIPS64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
5662
// Generates code for HInstanceOf. Produces 1 in `out` if `obj` is an instance
// of the class in input 1 (or matches the bitstring inputs), 0 otherwise.
// A null `obj` always yields 0 when a null check is required.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  Mips64Label done;
  SlowPathCodeMIPS64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Classes must be equal for the instanceof to succeed.
      // out = (out ^ cls) < 1, i.e. 1 iff the classes compare equal.
      __ Xor(out, out, cls.AsRegister<GpuRegister>());
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ Bnec(out, cls.AsRegister<GpuRegister>(), &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop, success;
      __ Bind(&loop);
      __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ Bnezc(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Do an exact check.
      Mips64Label success;
      __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      // A non-primitive array has a zero primitive-type field.
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Sltiu(out, out, 1);
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bnec(out, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      // Compare the class's bitstring against the constant path/mask inputs;
      // leaves zero in `out` on a match.
      GenerateBitstringTypeCheckCompare(instruction, out);
      __ Sltiu(out, out, 1);
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
5858
5859void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005860 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005861 locations->SetOut(Location::ConstantLocation(constant));
5862}
5863
// No code is emitted here; the constant's value is generated at use site.
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
5867
5868void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005869 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005870 locations->SetOut(Location::ConstantLocation(constant));
5871}
5872
// No code is emitted here; the null constant is generated at use site.
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
5876
// Builds locations for an invoke whose target could not be resolved at
// compile time; dispatch happens through a runtime trampoline.
void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
5883
// Delegates to the shared code generator helper that calls the
// unresolved-invoke runtime trampoline.
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
5887
Alexey Frunze4dda3372015-06-01 18:31:49 -07005888void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5889 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5890 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5891}
5892
// Builds locations for an interface invoke; reserves T0 for the hidden
// argument expected by the IMT conflict trampoline.
void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}
5899
// Generates an interface call: loads the receiver's class, indexes the IMT
// to find the ArtMethod, and jumps to its quick entry point, passing the
// method index as a hidden argument for IMT conflict resolution.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = klass->imt_;
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
5941
5942void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005943 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5944 if (intrinsic.TryDispatch(invoke)) {
5945 return;
5946 }
5947
Alexey Frunze4dda3372015-06-01 18:31:49 -07005948 HandleInvoke(invoke);
5949}
5950
5951void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005952 // Explicit clinit checks triggered by static invokes must have been pruned by
5953 // art::PrepareForRegisterAllocation.
5954 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005955
Chris Larsen3039e382015-08-26 07:54:08 -07005956 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5957 if (intrinsic.TryDispatch(invoke)) {
5958 return;
5959 }
5960
Alexey Frunze4dda3372015-06-01 18:31:49 -07005961 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005962}
5963
// Polymorphic invokes (MethodHandle.invoke/invokeExact) use the generic
// invoke location setup; the actual dispatch is a runtime call.
void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}
5967
// Delegates entirely to the shared code-generator helper, which emits the
// runtime call implementing the polymorphic dispatch.
void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
5971
// invoke-custom (call sites) uses the generic invoke location setup; the
// dispatch itself is performed by the runtime.
void LocationsBuilderMIPS64::VisitInvokeCustom(HInvokeCustom* invoke) {
  HandleInvoke(invoke);
}
5975
// Delegates to the shared helper which emits the runtime call for
// invoke-custom call-site resolution and invocation.
void InstructionCodeGeneratorMIPS64::VisitInvokeCustom(HInvokeCustom* invoke) {
  codegen_->GenerateInvokeCustomCall(invoke);
}
5979
Chris Larsen3039e382015-08-26 07:54:08 -07005980static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005981 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07005982 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
5983 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005984 return true;
5985 }
5986 return false;
5987}
5988
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005989HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
Alexey Frunzef63f5692016-12-13 17:43:11 -08005990 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005991 bool fallback_load = false;
5992 switch (desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005993 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005994 case HLoadString::LoadKind::kBootImageRelRo:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005995 case HLoadString::LoadKind::kBssEntry:
5996 DCHECK(!Runtime::Current()->UseJitCompilation());
5997 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005998 case HLoadString::LoadKind::kJitTableAddress:
5999 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08006000 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01006001 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006002 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko764d4542017-05-16 10:31:41 +01006003 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006004 }
6005 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006006 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006007 }
6008 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006009}
6010
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006011HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
6012 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006013 bool fallback_load = false;
6014 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006015 case HLoadClass::LoadKind::kInvalid:
6016 LOG(FATAL) << "UNREACHABLE";
6017 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08006018 case HLoadClass::LoadKind::kReferrersClass:
6019 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006020 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006021 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006022 case HLoadClass::LoadKind::kBssEntry:
6023 DCHECK(!Runtime::Current()->UseJitCompilation());
6024 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006025 case HLoadClass::LoadKind::kJitTableAddress:
6026 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08006027 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01006028 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006029 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunzef63f5692016-12-13 17:43:11 -08006030 break;
6031 }
6032 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006033 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006034 }
6035 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006036}
6037
// Returns the dispatch info this backend will use for a static/direct invoke.
// MIPS64 supports every method-load kind and code-pointer location, so the
// desired dispatch is accepted as-is.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types.
  return desired_dispatch_info;
}
6044
// Emits a static or direct call in two phases:
//   1. materialize the callee ArtMethod* (or entrypoint) per the method load
//      kind — from the thread, a PC-relative patch, a literal, or .bss;
//   2. branch to the code per the code-pointer location — either directly to
//      our own frame entry (recursive call) or through T9 via the callee's
//      quick-compiled entry point.
// `temp` receives the callee method for all kinds except kRecursive.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling ourselves: the current method is already in a register
      // (the invoke's special input); no load needed.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Two linked patches (high/low halves of the PC-relative address);
      // the 0x5678 immediate is a placeholder rewritten at link time.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          NewBootImageMethodPatch(invoke->GetTargetMethod());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          NewBootImageMethodPatch(invoke->GetTargetMethod(), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Known method address: load it from the literal pool.
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      uint32_t boot_image_offset = GetBootImageOffset(invoke);
      PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_offset);
      PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_offset, info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ Lwu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry resolved at runtime.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: compact branch-and-link straight to our frame entry.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ Nop();  // Filler following the Jalr (MIPS branch delay slot convention).
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
6125
6126void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00006127 // Explicit clinit checks triggered by static invokes must have been pruned by
6128 // art::PrepareForRegisterAllocation.
6129 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006130
6131 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6132 return;
6133 }
6134
6135 LocationSummary* locations = invoke->GetLocations();
6136 codegen_->GenerateStaticOrDirectCall(invoke,
6137 locations->HasTemps()
6138 ? locations->GetTemp(0)
6139 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006140}
6141
// Emits a virtual call: load the receiver's class, index into the embedded
// vtable to get the ArtMethod*, then call its quick-compiled entry point
// through T9. `temp_location` provides the scratch register used throughout.
void CodeGeneratorMIPS64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  // Compressed (32-bit) heap reference, hence kLoadUnsignedWord. The implicit
  // null check must be recorded immediately after this load.
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();  // Filler following the Jalr (MIPS branch delay slot convention).
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
6177
6178void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
6179 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6180 return;
6181 }
6182
6183 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006184 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006185}
6186
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  // Sets up register locations for HLoadClass depending on its load kind.
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Full runtime call: in and out share the first runtime-call argument register.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // A slow path is needed for clinit/resolution (environment) or for the
  // read-barrier mark step.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // Needs the current ArtMethod* as input to read its declaring class.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
6221
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
// Emits code materializing a mirror::Class reference into the output register,
// using one of several strategies (current method's declaring class, PC-relative
// boot-image address, literal, .bss entry, or JIT table), plus an optional slow
// path for resolution and/or class initialization.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  // Only kReferrersClass (and kRuntimeCall, handled above) take the current
  // method as input; other kinds leave this as ZERO.
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  // Boot-image classes are never moved by the GC, so no read barrier is needed.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Linked high/low patches; 0x5678 is a link-time placeholder.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The class address fits in 32 bits (boot image lives in the low 4GiB).
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageRelRoPatch(boot_image_offset);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageRelRoPatch(boot_image_offset, info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      // 32-bit entry in the .data.bimg.rel.ro section.
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load the GC root from the .bss entry; a null result means the class is
      // unresolved and the slow path must be taken (generate_null_check below).
      CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, out);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress:
      // Load the root through the JIT class table entry for this (dex file,
      // type index) pair.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                          cls->GetTypeIndex(),
                                                          cls->GetClass()));
      GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      // Unresolved .bss entry: branch to the resolution slow path.
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6332
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006333void LocationsBuilderMIPS64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6334 InvokeRuntimeCallingConvention calling_convention;
6335 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6336 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, loc, loc);
6337}
6338
// Delegates to the shared helper which emits the runtime call that resolves
// and returns the MethodHandle.
void InstructionCodeGeneratorMIPS64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6342
Orion Hodson18259d72018-04-12 11:18:23 +01006343void LocationsBuilderMIPS64::VisitLoadMethodType(HLoadMethodType* load) {
6344 InvokeRuntimeCallingConvention calling_convention;
6345 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6346 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, loc, loc);
6347}
6348
// Delegates to the shared helper which emits the runtime call that resolves
// and returns the MethodType.
void InstructionCodeGeneratorMIPS64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6352
// Byte offset of the pending-exception slot within the Thread object,
// addressed off the thread register (TR).
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
}
6356
Alexey Frunze4dda3372015-06-01 18:31:49 -07006357void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6358 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006359 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006360 locations->SetOut(Location::RequiresRegister());
6361}
6362
// Loads the pending exception reference (compressed 32-bit heap reference,
// hence kLoadUnsignedWord) from the thread-local slot into the output register.
void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
  GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
}
6367
// Clearing the exception needs no registers at all — just an empty,
// no-call location summary.
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6371
// Clears the pending exception by storing zero (null reference, 32-bit store)
// into the thread-local exception slot.
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
6375
void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
  // Sets up register locations for HLoadString depending on its load kind.
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    // Full runtime call: the result arrives in the first runtime-call
    // argument register.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
6398
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006399// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6400// move.
6401void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006402 HLoadString::LoadKind load_kind = load->GetLoadKind();
6403 LocationSummary* locations = load->GetLocations();
6404 Location out_loc = locations->Out();
6405 GpuRegister out = out_loc.AsRegister<GpuRegister>();
6406
6407 switch (load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006408 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
6409 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006410 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006411 codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006412 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006413 codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006414 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006415 __ Daddiu(out, AT, /* placeholder */ 0x5678);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006416 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006417 }
6418 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006419 uint32_t address = dchecked_integral_cast<uint32_t>(
6420 reinterpret_cast<uintptr_t>(load->GetString().Get()));
6421 DCHECK_NE(address, 0u);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006422 __ LoadLiteral(out,
6423 kLoadUnsignedWord,
6424 codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006425 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006426 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006427 case HLoadString::LoadKind::kBootImageRelRo: {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006428 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006429 uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006430 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006431 codegen_->NewBootImageRelRoPatch(boot_image_offset);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006432 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006433 codegen_->NewBootImageRelRoPatch(boot_image_offset, info_high);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006434 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
6435 __ Lwu(out, AT, /* placeholder */ 0x5678);
6436 return;
6437 }
6438 case HLoadString::LoadKind::kBssEntry: {
6439 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6440 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6441 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
6442 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6443 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Vladimir Markof3c52b42017-11-17 17:32:12 +00006444 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, out);
Alexey Frunze15958152017-02-09 19:08:30 -08006445 GenerateGcRootFieldLoad(load,
6446 out_loc,
Vladimir Markof3c52b42017-11-17 17:32:12 +00006447 out,
Alexey Frunze15958152017-02-09 19:08:30 -08006448 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006449 kCompilerReadBarrierOption,
6450 &info_low->label);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006451 SlowPathCodeMIPS64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00006452 new (codegen_->GetScopedAllocator()) LoadStringSlowPathMIPS64(load);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006453 codegen_->AddSlowPath(slow_path);
6454 __ Beqzc(out, slow_path->GetEntryLabel());
6455 __ Bind(slow_path->GetExitLabel());
6456 return;
6457 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006458 case HLoadString::LoadKind::kJitTableAddress:
6459 __ LoadLiteral(out,
6460 kLoadUnsignedWord,
6461 codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
6462 load->GetStringIndex(),
6463 load->GetString()));
Alexey Frunze15958152017-02-09 19:08:30 -08006464 GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
Alexey Frunze627c1a02017-01-30 19:28:14 -08006465 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006466 default:
6467 break;
6468 }
6469
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006470 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006471 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006472 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006473 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006474 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
6475 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6476 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006477}
6478
// A long constant occupies no register: it is marked as a constant location
// and materialized at each use site.
void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
6487
6488void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006489 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6490 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006491 InvokeRuntimeCallingConvention calling_convention;
6492 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6493}
6494
6495void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006496 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006497 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006498 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006499 if (instruction->IsEnter()) {
6500 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6501 } else {
6502 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6503 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006504}
6505
// Multiplication needs only registers: GPRs for integral types, FPRs for
// floating-point types; the output never overlaps the inputs' lifetimes in a
// way that requires a distinct register.
void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
6528
6529void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006530 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006531 LocationSummary* locations = instruction->GetLocations();
6532
6533 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006534 case DataType::Type::kInt32:
6535 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006536 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6537 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6538 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006539 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006540 __ MulR6(dst, lhs, rhs);
6541 else
6542 __ Dmul(dst, lhs, rhs);
6543 break;
6544 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006545 case DataType::Type::kFloat32:
6546 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006547 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6548 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6549 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006550 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006551 __ MulS(dst, lhs, rhs);
6552 else
6553 __ MulD(dst, lhs, rhs);
6554 break;
6555 }
6556 default:
6557 LOG(FATAL) << "Unexpected mul type " << type;
6558 }
6559}
6560
// Negation is a pure register operation; choose GPR vs FPR by result type.
void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
6581
6582void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006583 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006584 LocationSummary* locations = instruction->GetLocations();
6585
6586 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006587 case DataType::Type::kInt32:
6588 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006589 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6590 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006591 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006592 __ Subu(dst, ZERO, src);
6593 else
6594 __ Dsubu(dst, ZERO, src);
6595 break;
6596 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006597 case DataType::Type::kFloat32:
6598 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006599 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6600 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006601 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006602 __ NegS(dst, src);
6603 else
6604 __ NegD(dst, src);
6605 break;
6606 }
6607 default:
6608 LOG(FATAL) << "Unexpected neg type " << type;
6609 }
6610}
6611
6612void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006613 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6614 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006615 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006616 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006617 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6618 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006619}
6620
6621void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08006622 // Note: if heap poisoning is enabled, the entry point takes care
6623 // of poisoning the reference.
Goran Jakovljevic854df412017-06-27 14:41:39 +02006624 QuickEntrypointEnum entrypoint =
6625 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
6626 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006627 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02006628 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006629}
6630
6631void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006632 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6633 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006634 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07006635 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006636 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006637}
6638
6639void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07006640 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
6641 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006642}
6643
6644void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006645 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006646 locations->SetInAt(0, Location::RequiresRegister());
6647 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6648}
6649
6650void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006651 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006652 LocationSummary* locations = instruction->GetLocations();
6653
6654 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006655 case DataType::Type::kInt32:
6656 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006657 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6658 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6659 __ Nor(dst, src, ZERO);
6660 break;
6661 }
6662
6663 default:
6664 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
6665 }
6666}
6667
6668void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006669 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006670 locations->SetInAt(0, Location::RequiresRegister());
6671 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6672}
6673
6674void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6675 LocationSummary* locations = instruction->GetLocations();
6676 __ Xori(locations->Out().AsRegister<GpuRegister>(),
6677 locations->InAt(0).AsRegister<GpuRegister>(),
6678 1);
6679}
6680
6681void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006682 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
6683 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006684}
6685
Calin Juravle2ae48182016-03-16 14:05:09 +00006686void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
6687 if (CanMoveNullCheckToUser(instruction)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006688 return;
6689 }
6690 Location obj = instruction->GetLocations()->InAt(0);
6691
6692 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00006693 RecordPcInfo(instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006694}
6695
Calin Juravle2ae48182016-03-16 14:05:09 +00006696void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006697 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006698 new (GetScopedAllocator()) NullCheckSlowPathMIPS64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00006699 AddSlowPath(slow_path);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006700
6701 Location obj = instruction->GetLocations()->InAt(0);
6702
6703 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
6704}
6705
6706void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00006707 codegen_->GenerateNullCheck(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006708}
6709
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

// HParallelMove has no locations to build; reaching this visitor indicates a
// compiler bug.
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
6732
6733void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006734 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006735 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
6736 if (location.IsStackSlot()) {
6737 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6738 } else if (location.IsDoubleStackSlot()) {
6739 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6740 }
6741 locations->SetOut(location);
6742}
6743
6744void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
6745 ATTRIBUTE_UNUSED) {
6746 // Nothing to do, the parameter is already at its location.
6747}
6748
6749void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
6750 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006751 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006752 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
6753}
6754
6755void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
6756 ATTRIBUTE_UNUSED) {
6757 // Nothing to do, the method is already at its location.
6758}
6759
6760void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006761 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01006762 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006763 locations->SetInAt(i, Location::Any());
6764 }
6765 locations->SetOut(Location::Any());
6766}
6767
6768void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
6769 LOG(FATAL) << "Unreachable";
6770}
6771
6772void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006773 DataType::Type type = rem->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006774 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006775 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
6776 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006777 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006778
6779 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006780 case DataType::Type::kInt32:
6781 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006782 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07006783 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006784 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6785 break;
6786
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006787 case DataType::Type::kFloat32:
6788 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006789 InvokeRuntimeCallingConvention calling_convention;
6790 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
6791 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
6792 locations->SetOut(calling_convention.GetReturnLocation(type));
6793 break;
6794 }
6795
6796 default:
6797 LOG(FATAL) << "Unexpected rem type " << type;
6798 }
6799}
6800
6801void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006802 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006803
6804 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006805 case DataType::Type::kInt32:
6806 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07006807 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006808 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006809
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006810 case DataType::Type::kFloat32:
6811 case DataType::Type::kFloat64: {
6812 QuickEntrypointEnum entrypoint =
6813 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescufc734082016-07-19 17:18:07 +01006814 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006815 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00006816 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
6817 } else {
6818 CheckEntrypointTypes<kQuickFmod, double, double, double>();
6819 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006820 break;
6821 }
6822 default:
6823 LOG(FATAL) << "Unexpected rem type " << type;
6824 }
6825}
6826
// Shared location setup for HMin/HMax: both operands and the result in
// registers (GPRs for integral types, FPRs for floating-point types).
static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
  LocationSummary* locations = new (allocator) LocationSummary(minmax);
  switch (minmax->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    default:
      LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
  }
}
6846
// Integer min/max via SLT plus a SELEQZ/SELNEZ pair (MIPS64r6 has no
// conditional move that leaves the destination unchanged); the same sequence
// serves both 32-bit and 64-bit operands.
void InstructionCodeGeneratorMIPS64::GenerateMinMaxInt(LocationSummary* locations, bool is_min) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (lhs == rhs) {
    // min(x, x) == max(x, x) == x; just copy if needed.
    if (out != lhs) {
      __ Move(out, lhs);
    }
  } else {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
    // change the target (output) register. If the condition is true the
    // output register gets the contents of the "rs" register; otherwise,
    // the output register is set to zero. One consequence of this is
    // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
    // needs to use a pair of SELEQZ/SELNEZ instructions. After
    // executing this pair of instructions one of the output registers
    // from the pair will necessarily contain zero. Then the code ORs the
    // output registers from the SELEQZ/SELNEZ instructions to get the
    // final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (out == lhs) {
      __ Slt(AT, rhs, lhs);
      if (is_min) {
        __ Seleqz(out, lhs, AT);
        __ Selnez(AT, rhs, AT);
      } else {
        __ Selnez(out, lhs, AT);
        __ Seleqz(AT, rhs, AT);
      }
    } else {
      __ Slt(AT, lhs, rhs);
      if (is_min) {
        __ Seleqz(out, rhs, AT);
        __ Selnez(AT, lhs, AT);
      } else {
        __ Selnez(out, rhs, AT);
        __ Seleqz(AT, lhs, AT);
      }
    }
    // Exactly one of `out`/`AT` is zero here; OR merges the selected value.
    __ Or(out, out, AT);
  }
}
6902
// Floating-point min/max with Java NaN semantics: if either input is a NaN,
// the NaN is returned; otherwise MIN.fmt/MAX.fmt computes the result.
void InstructionCodeGeneratorMIPS64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Scratch register for the NaN-select; use `out` directly when it does not
  // alias an input.
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == DataType::Type::kFloat64) {
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
6970
// Dispatches HMin/HMax code generation to the integer or floating-point
// helper based on the result type.
void InstructionCodeGeneratorMIPS64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
  DataType::Type type = minmax->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      GenerateMinMaxInt(minmax->GetLocations(), is_min);
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
      break;
    default:
      LOG(FATAL) << "Unexpected type for HMinMax " << type;
  }
}
6986
// HMin/HMax share locations (CreateMinMaxLocations) and code generation
// (GenerateMinMax), differing only in the `is_min` flag.
void LocationsBuilderMIPS64::VisitMin(HMin* min) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
}

void InstructionCodeGeneratorMIPS64::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}

void LocationsBuilderMIPS64::VisitMax(HMax* max) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}

void InstructionCodeGeneratorMIPS64::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
7002
// Absolute value is a pure register operation; choose GPR vs FPR by type.
void LocationsBuilderMIPS64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    default:
      LOG(FATAL) << "Unexpected abs type " << abs->GetResultType();
  }
}
7020
// Integer abs uses the branch-free sign-mask identity
// abs(x) = (x ^ (x >> width-1)) - (x >> width-1); floating-point abs maps
// directly to ABS.S/ABS.D.
void InstructionCodeGeneratorMIPS64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      // AT = 0 if in >= 0, all ones otherwise (arithmetic shift by 31).
      __ Sra(AT, in, 31);
      __ Xor(out, in, AT);
      __ Subu(out, out, AT);
      break;
    }
    case DataType::Type::kInt64: {
      GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      // DSRA32 shifts by 32 + 31 = 63 bits, yielding the 64-bit sign mask.
      __ Dsra32(AT, in, 31);
      __ Xor(out, in, AT);
      __ Dsubu(out, out, AT);
      break;
    }
    case DataType::Type::kFloat32: {
      FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      __ AbsS(out, in);
      break;
    }
    case DataType::Type::kFloat64: {
      FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      __ AbsD(out, in);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected abs type " << abs->GetResultType();
  }
}
7056
// A constructor fence is emitted as a store-store barrier; a generic memory
// barrier uses whatever kind the HIR node specifies.
void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
7073
7074void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007075 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(ret);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007076 DataType::Type return_type = ret->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07007077 locations->SetInAt(0, Mips64ReturnLocation(return_type));
7078}
7079
7080void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
7081 codegen_->GenerateFrameExit();
7082}
7083
7084void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
7085 ret->SetLocations(nullptr);
7086}
7087
7088void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
7089 codegen_->GenerateFrameExit();
7090}
7091
// Rotate (Ror) and the arithmetic/logical shifts (Shl/Shr) are all lowered
// through the shared HandleShift() helper, for both location building and
// code generation.
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
7115
// Subtraction is lowered through the generic binary-op helper.
void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

// Static field accesses reuse the shared field-access helpers.
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // NOTE(review): value-can-be-null presumably lets HandleFieldSet elide a
  // null check (e.g. around a GC write barrier) — confirm in HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
7139
// Unresolved field accesses cannot be compiled to direct loads/stores; the
// operands are arranged per the field-access calling convention and the
// access is performed by a runtime call (GenerateUnresolvedFieldAccess).
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
7207
// A suspend check calls into the runtime only on its slow path.
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
7217
// Emits an explicit suspend check unless it is subsumed by a loop back edge
// or by the entry block's trailing goto, which emit the check themselves.
void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
7231
// Throwing always calls into the runtime; the exception object goes in the
// first runtime-call argument register.
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  // Hand the exception object off to the runtime delivery entrypoint.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
7243
7244void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007245 DataType::Type input_type = conversion->GetInputType();
7246 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01007247 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
7248 << input_type << " -> " << result_type;
Alexey Frunze4dda3372015-06-01 18:31:49 -07007249
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007250 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
7251 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07007252 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
7253 }
7254
Vladimir Markoca6fff82017-10-03 14:49:14 +01007255 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(conversion);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007256
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007257 if (DataType::IsFloatingPointType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007258 locations->SetInAt(0, Location::RequiresFpuRegister());
7259 } else {
7260 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07007261 }
7262
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007263 if (DataType::IsFloatingPointType(result_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007264 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007265 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007266 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007267 }
7268}
7269
// Emits code for an explicit type conversion. Integral<->integral conversions
// use masking or sign-extension; integral<->FP conversions stage the value in
// the FTMP scratch FPU register; FP<->FP conversions are single CVT
// instructions.
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case DataType::Type::kUint8:
        // Zero-extend to 8 bits.
        __ Andi(dst, src, 0xFF);
        break;
      case DataType::Type::kInt8:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          // Sign-extend to 8 bits.
          __ Seb(dst, src);
        }
        break;
      case DataType::Type::kUint16:
        // Zero-extend to 16 bits.
        __ Andi(dst, src, 0xFFFF);
        break;
      case DataType::Type::kInt16:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          // Sign-extend to 16 bits.
          __ Seh(dst, src);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == DataType::Type::kInt64) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    // Move the integer bits into FTMP, then convert to the target FP type.
    if (input_type == DataType::Type::kInt64) {
      __ Dmtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();

    // Truncate into FTMP, then move the integer bits to the GPR result.
    if (result_type == DataType::Type::kInt64) {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == DataType::Type::kFloat32) {
      // double -> float.
      __ Cvtsd(dst, src);
    } else {
      // float -> double.
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
7377
// Unsigned shift right shares the generic shift lowering.
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

// Bitwise xor shares the generic binary-op lowering.
void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
7393
void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
7403
// All condition instructions — the signed comparisons (Equal, NotEqual,
// LessThan, LessThanOrEqual, GreaterThan, GreaterThanOrEqual) and the
// unsigned ones (Below, BelowOrEqual, Above, AboveOrEqual) — are lowered
// through the shared HandleCondition() helper.
void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
7483
// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  // Only the switched-on value needs a register; code generation uses the
  // TMP/AT scratch registers for the comparisons.
  locations->SetInAt(0, Location::RequiresRegister());
}
7490
// Lowers a packed switch as a cascade of compare-and-branch instructions.
// `temp_reg` holds (value - current case value) and is decremented while
// walking the cases two at a time, so each pair costs a single Addiu.
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
7526
// Lowers a packed switch via a jump table of 32-bit offsets that are
// relative to the table's own start address.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  // TMP = AT + (TMP << 2): each table entry is 4 bytes.
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();  // Delay slot.
}
7557
7558void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7559 int32_t lower_bound = switch_instr->GetStartValue();
7560 uint32_t num_entries = switch_instr->GetNumEntries();
7561 LocationSummary* locations = switch_instr->GetLocations();
7562 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7563 HBasicBlock* switch_block = switch_instr->GetBlock();
7564 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7565
7566 if (num_entries > kPackedSwitchJumpTableThreshold) {
7567 GenTableBasedPackedSwitch(value_reg,
7568 lower_bound,
7569 num_entries,
7570 switch_block,
7571 default_block);
7572 } else {
7573 GenPackedSwitchWithCompares(value_reg,
7574 lower_bound,
7575 num_entries,
7576 switch_block,
7577 default_block);
7578 }
7579}
7580
// ClassTableGet reads a method pointer out of a class object: class in a
// register in, method pointer in a register out, no runtime call.
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
7587
// Loads a method pointer from a class: a single load from the embedded
// vtable, or two loads (class -> IMT pointer -> entry) for the IMT case.
void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // The vtable is embedded in the class object; one load suffices.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kMips64PointerSize).SizeValue();
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      method_offset);
  } else {
    // First load the IMT pointer from the class, then index into it.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kMips64PointerSize));
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->Out().AsRegister<GpuRegister>(),
                      method_offset);
  }
}
7610
// NOTE(review): HIntermediateAddress is apparently never generated for this
// architecture, so reaching either visitor would be a compiler bug.
void LocationsBuilderMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7620
Alexey Frunze4dda3372015-06-01 18:31:49 -07007621} // namespace mips64
7622} // namespace art