/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_mips64.h"

#include "arch/mips64/instruction_set_features_mips64.h"
#include "art_method.h"
#include "code_generator_mips64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"
#include "utils/mips64/assembler_mips64.h"
#include "utils/mips64/constants_mips64.h"

namespace art {

namespace mips64 {
36
37IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
Vladimir Markoca6fff82017-10-03 14:49:14 +010038 : codegen_(codegen), allocator_(codegen->GetGraph()->GetAllocator()) {
Chris Larsen3039e382015-08-26 07:54:08 -070039}
40
// Returns the backing code generator's assembler, viewed as a MIPS64
// assembler.
Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
  return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
}
44
// Returns the arena allocator of the graph currently being compiled.
ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
  return codegen_->GetGraph()->GetAllocator();
}
48
// Whether the target ISA provides the MSA (SIMD) extension; used to pick
// faster instruction sequences (e.g. pcnt for bitCount).
inline bool IntrinsicCodeGeneratorMIPS64::HasMsa() const {
  return codegen_->GetInstructionSetFeatures().HasMsa();
}
52
#define __ codegen->GetAssembler()->

55static void MoveFromReturnRegister(Location trg,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010056 DataType::Type type,
Chris Larsen9701c2e2015-09-04 17:22:47 -070057 CodeGeneratorMIPS64* codegen) {
58 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010059 DCHECK_EQ(type, DataType::Type::kVoid);
Chris Larsen9701c2e2015-09-04 17:22:47 -070060 return;
61 }
62
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010063 DCHECK_NE(type, DataType::Type::kVoid);
Chris Larsen9701c2e2015-09-04 17:22:47 -070064
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010065 if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
Chris Larsen9701c2e2015-09-04 17:22:47 -070066 GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
67 if (trg_reg != V0) {
68 __ Move(V0, trg_reg);
69 }
70 } else {
71 FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
72 if (trg_reg != F0) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010073 if (type == DataType::Type::kFloat32) {
Chris Larsen9701c2e2015-09-04 17:22:47 -070074 __ MovS(F0, trg_reg);
75 } else {
76 __ MovD(F0, trg_reg);
77 }
78 }
79 }
80}
81
// Moves the invoke's actual arguments from their intrinsic locations into
// the positions required by the regular MIPS64 dex calling convention.
static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}
86
// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
      : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) override {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Preserve every live register before clobbering them with the call.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Shuffle arguments into regular calling-convention positions.
    MoveArguments(invoke_, codegen);

    // A0 carries the method pointer for the generated call
    // (presumably the ArtMethod* load location — matches the other uses of
    // GenerateStaticOrDirectCall/GenerateVirtualCall in this backend).
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(
          invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
    } else {
      codegen->GenerateVirtualCall(
          invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    // Unconditional compact branch back to the fast path.
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const override { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};
137
#undef __

// Tries to recognize `invoke` as an intrinsic and build its locations.
// Returns true if the invoke was intrinsified, meaning the special
// intrinsic code-generation path will be used instead of a regular call.
bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

// Locations for intrinsics consuming one FP value and producing one core
// register (raw bit extraction).
static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

// Raw bit move FPU -> GPU; `is64bit` selects doubleword (dmfc1) vs word
// (mfc1) form.
static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dmfc1(out, in);
  } else {
    __ Mfc1(out, in);
  }
}

// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
183
// Locations for intrinsics consuming one core-register value and producing
// one FP result (raw bit injection).
static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

// Raw bit move GPU -> FPU; `is64bit` selects doubleword (dmtc1) vs word
// (mtc1) form.
static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is64bit) {
    __ Dmtc1(in, out);
  } else {
    __ Mtc1(in, out);
  }
}

// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
219
// Locations for intrinsics mapping one core register to one core register;
// the output is allowed to share a register with the input.
static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Emits a byte swap of `in` into `out` for 16-, 32- or 64-bit values.
static void GenReverseBytes(LocationSummary* locations,
                            DataType::Type type,
                            Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  switch (type) {
    case DataType::Type::kInt16:
      // Swap the bytes within each halfword, then sign-extend the low half.
      __ Dsbh(out, in);
      __ Seh(out, out);
      break;
    case DataType::Type::kInt32:
      // Swap the halfwords via rotate, then the bytes within each halfword.
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
      break;
    case DataType::Type::kInt64:
      // Swap bytes within halfwords, then swap the halfword order.
      __ Dsbh(out, in);
      __ Dshd(out, out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
}
278
// Counts leading zero bits with the clz/dclz instructions.
static void GenNumberOfLeadingZeroes(LocationSummary* locations,
                                     bool is64bit,
                                     Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dclz(out, in);
  } else {
    __ Clz(out, in);
  }
}

// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
309
// Counts trailing zero bits by fully reversing the value (byte swap followed
// by a per-byte bit swap) and then counting leading zeroes of the result.
static void GenNumberOfTrailingZeroes(LocationSummary* locations,
                                      bool is64bit,
                                      Mips64Assembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
    __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  } else {
    __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
    __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  }
}

// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
346
// Emits a full bit reversal: reverse the byte order, then reverse the bits
// within each byte with bitswap/dbitswap.
static void GenReverse(LocationSummary* locations,
                       DataType::Type type,
                       Mips64Assembler* assembler) {
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == DataType::Type::kInt32) {
    __ Rotr(out, in, 16);
    __ Wsbh(out, out);
    __ Bitswap(out, out);
  } else {
    __ Dsbh(out, in);
    __ Dshd(out, out);
    __ Dbitswap(out, out);
  }
}

// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
383
Vladimir Markoca6fff82017-10-03 14:49:14 +0100384static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
385 LocationSummary* locations =
386 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700387 locations->SetInAt(0, Location::RequiresFpuRegister());
388 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
389}
390
// Emits a population count. With MSA available, a single pcnt.w/pcnt.d does
// the work through the FP/vector register file; otherwise the classic
// parallel bit-counting sequence is used.
static void GenBitCount(LocationSummary* locations,
                        const DataType::Type type,
                        const bool hasMsa,
                        Mips64Assembler* assembler) {
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm requires 25 instructions.
  //
  // For a 64-bit operand this can be performed in 24 instructions compared
  // to a(n unrolled) loop based algorithm which requires 38 instructions.
  //
  // There are algorithms which are faster in the cases where very few
  // bits are set but the algorithm here attempts to minimize the total
  // number of instructions executed even when a large number of bits
  // are set.
  if (hasMsa) {
    if (type == DataType::Type::kInt32) {
      __ Mtc1(in, FTMP);
      __ PcntW(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
      __ Mfc1(out, FTMP);
    } else {
      __ Dmtc1(in, FTMP);
      __ PcntD(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
      __ Dmfc1(out, FTMP);
    }
  } else {
    if (type == DataType::Type::kInt32) {
      __ Srl(TMP, in, 1);
      __ LoadConst32(AT, 0x55555555);
      __ And(TMP, TMP, AT);
      __ Subu(TMP, in, TMP);
      __ LoadConst32(AT, 0x33333333);
      __ And(out, TMP, AT);
      __ Srl(TMP, TMP, 2);
      __ And(TMP, TMP, AT);
      __ Addu(TMP, out, TMP);
      __ Srl(out, TMP, 4);
      __ Addu(out, out, TMP);
      __ LoadConst32(AT, 0x0F0F0F0F);
      __ And(out, out, AT);
      __ LoadConst32(TMP, 0x01010101);
      __ MulR6(out, out, TMP);
      __ Srl(out, out, 24);
    } else {
      __ Dsrl(TMP, in, 1);
      __ LoadConst64(AT, 0x5555555555555555L);
      __ And(TMP, TMP, AT);
      __ Dsubu(TMP, in, TMP);
      __ LoadConst64(AT, 0x3333333333333333L);
      __ And(out, TMP, AT);
      __ Dsrl(TMP, TMP, 2);
      __ And(TMP, TMP, AT);
      __ Daddu(TMP, out, TMP);
      __ Dsrl(out, TMP, 4);
      __ Daddu(out, out, TMP);
      __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
      __ And(out, out, AT);
      __ LoadConst64(TMP, 0x0101010101010101L);
      __ Dmul(out, out, TMP);
      // Shift by 32 + 24 = 56 to bring the top byte (the count) down.
      __ Dsrl32(out, out, 24);
    }
  }
}

// int java.lang.Integer.bitCount(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt32, HasMsa(), GetAssembler());
}

// int java.lang.Long.bitCount(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt64, HasMsa(), GetAssembler());
}
488
// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // A single sqrt.d instruction implements the intrinsic.
  __ SqrtD(out, in);
}
502
// FP -> FP locations; `overlaps` controls whether the output must get a
// register distinct from the input (GenRoundingMode needs distinct ones).
static void CreateFPToFP(ArenaAllocator* allocator,
                         HInvoke* invoke,
                         Location::OutputOverlap overlaps = Location::kOutputOverlap) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), overlaps);
}

// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFP(allocator_, invoke, Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // A single rint.d instruction implements the intrinsic.
  __ RintD(out, in);
}
525
// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFP(allocator_, invoke);
}

// FP class bits (as produced by class.d) for which floor/ceil must return
// the input unchanged: zeroes, infinities and NaNs.
const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
                                             kPositiveInfinity |
                                             kNegativeZero |
                                             kNegativeInfinity |
                                             kQuietNaN |
                                             kSignalingNaN;

enum FloatRoundingMode {
  kFloor,
  kCeil,
};

// Shared code generation for Math.floor/Math.ceil on doubles. Requires the
// input and output to live in distinct FP registers (see CreateFPToFP).
static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  DCHECK_NE(in, out);

  Mips64Label done;

  // double floor/ceil(double in) {
  //   if in.isNaN || in.isInfinite || in.isZero {
  //     return in;
  //   }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);  // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //   Long outLong = floor/ceil(in);
  //   if (outLong == Long.MAX_VALUE) || (outLong == Long.MIN_VALUE) {
  //     // floor()/ceil() has almost certainly returned a value
  //     // which can't be successfully represented as a signed
  //     // 64-bit number.  Java expects that the input value will
  //     // be returned in these cases.
  //     // There is also a small probability that floor(in)/ceil(in)
  //     // correctly truncates/rounds up the input value to
  //     // Long.MAX_VALUE or Long.MIN_VALUE. In these cases, this
  //     // exception handling code still does the correct thing.
  //     return in;
  //   }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ Daddiu(TMP, AT, 1);
  __ Dati(TMP, 0x8000);  // TMP = AT + 0x8000 0000 0000 0001
                         // or AT - 0x7FFF FFFF FFFF FFFF.
                         // IOW, TMP = 1 if AT = Long.MIN_VALUE
                         // or TMP = 0 if AT = Long.MAX_VALUE.
  __ Dsrl(TMP, TMP, 1);  // TMP = 0 if AT = Long.MIN_VALUE
                         //         or AT = Long.MAX_VALUE.
  __ Beqzc(TMP, &done);

  //   double out = outLong;
  //   return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}

void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}

// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFP(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}
611
// Shared code generation for Math.round(float)/Math.round(double):
// computes floor(in) and then adds one when (in - floor(in)) >= 0.5,
// leaving saturated results (integer MIN/MAX from floor) untouched.
static void GenRound(LocationSummary* locations, Mips64Assembler* assembler, DataType::Type type) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister half = locations->GetTemp(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kFloat32 || type == DataType::Type::kFloat64);

  Mips64Label done;

  // out = floor(in);
  //
  // if (out != MAX_VALUE && out != MIN_VALUE) {
  //   TMP = ((in - out) >= 0.5) ? 1 : 0;
  //   return out += TMP;
  // }
  // return out;

  // out = floor(in);
  if (type == DataType::Type::kFloat64) {
    __ FloorLD(FTMP, in);
    __ Dmfc1(out, FTMP);
  } else {
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);
  }

  // if (out != MAX_VALUE && out != MIN_VALUE)
  if (type == DataType::Type::kFloat64) {
    __ Daddiu(TMP, out, 1);
    __ Dati(TMP, 0x8000);  // TMP = out + 0x8000 0000 0000 0001
                           // or out - 0x7FFF FFFF FFFF FFFF.
                           // IOW, TMP = 1 if out = Long.MIN_VALUE
                           // or TMP = 0 if out = Long.MAX_VALUE.
    __ Dsrl(TMP, TMP, 1);  // TMP = 0 if out = Long.MIN_VALUE
                           //         or out = Long.MAX_VALUE.
    __ Beqzc(TMP, &done);
  } else {
    __ Addiu(TMP, out, 1);
    __ Aui(TMP, TMP, 0x8000);  // TMP = out + 0x8000 0001
                               // or out - 0x7FFF FFFF.
                               // IOW, TMP = 1 if out = Int.MIN_VALUE
                               // or TMP = 0 if out = Int.MAX_VALUE.
    __ Srl(TMP, TMP, 1);  // TMP = 0 if out = Int.MIN_VALUE
                          //         or out = Int.MAX_VALUE.
    __ Beqzc(TMP, &done);
  }

  // TMP = (0.5 <= (in - out)) ? -1 : 0;
  if (type == DataType::Type::kFloat64) {
    __ Cvtdl(FTMP, FTMP);  // Convert output of floor.l.d back to "double".
    __ LoadConst64(AT, bit_cast<int64_t, double>(0.5));
    __ SubD(FTMP, in, FTMP);
    __ Dmtc1(AT, half);
    __ CmpLeD(FTMP, half, FTMP);
    __ Dmfc1(TMP, FTMP);
  } else {
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);
    __ CmpLeS(FTMP, half, FTMP);
    __ Mfc1(TMP, FTMP);
  }

  // Return out -= TMP. (The compare left TMP = -1 when rounding up, so the
  // subtraction adds one.)
  if (type == DataType::Type::kFloat64) {
    __ Dsubu(out, out, TMP);
  } else {
    __ Subu(out, out, TMP);
  }

  __ Bind(&done);
}
685
686// int java.lang.Math.round(float)
687void IntrinsicLocationsBuilderMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100688 LocationSummary* locations =
689 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700690 locations->SetInAt(0, Location::RequiresFpuRegister());
691 locations->AddTemp(Location::RequiresFpuRegister());
692 locations->SetOut(Location::RequiresRegister());
693}
694
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  // Emit the shared round-to-nearest sequence for the 32-bit float flavor.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat32);
}
698
699// long java.lang.Math.round(double)
700void IntrinsicLocationsBuilderMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100701 LocationSummary* locations =
702 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700703 locations->SetInAt(0, Location::RequiresFpuRegister());
704 locations->AddTemp(Location::RequiresFpuRegister());
705 locations->SetOut(Location::RequiresRegister());
706}
707
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  // Emit the shared round-to-nearest sequence for the 64-bit double flavor.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat64);
}
711
// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  // Raw-memory peek: one core register in (64-bit address), one core out.
  CreateIntToIntLocations(allocator_, invoke);
}
716
717void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
718 Mips64Assembler* assembler = GetAssembler();
719 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
720 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
721
722 __ Lb(out, adr, 0);
723}
724
// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  // Raw-memory peek: address register in, value register out.
  CreateIntToIntLocations(allocator_, invoke);
}
729
730void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
731 Mips64Assembler* assembler = GetAssembler();
732 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
733 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
734
735 __ Lh(out, adr, 0);
736}
737
// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  // Raw-memory peek: address register in, value register out.
  CreateIntToIntLocations(allocator_, invoke);
}
742
743void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
744 Mips64Assembler* assembler = GetAssembler();
745 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
746 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
747
748 __ Lw(out, adr, 0);
749}
750
// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  // Raw-memory peek: address register in, value register out.
  CreateIntToIntLocations(allocator_, invoke);
}
755
756void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
757 Mips64Assembler* assembler = GetAssembler();
758 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
759 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
760
761 __ Ld(out, adr, 0);
762}
763
Vladimir Markoca6fff82017-10-03 14:49:14 +0100764static void CreateIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
765 LocationSummary* locations =
766 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen70fb1f42015-09-04 10:15:27 -0700767 locations->SetInAt(0, Location::RequiresRegister());
768 locations->SetInAt(1, Location::RequiresRegister());
769}
770
// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  // Raw-memory poke: address and value registers in, nothing out.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
775
776void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
777 Mips64Assembler* assembler = GetAssembler();
778 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
779 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
780
781 __ Sb(val, adr, 0);
782}
783
// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  // Raw-memory poke: address and value registers in, nothing out.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
788
789void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
790 Mips64Assembler* assembler = GetAssembler();
791 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
792 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
793
794 __ Sh(val, adr, 0);
795}
796
// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  // Raw-memory poke: address and value registers in, nothing out.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
801
802void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
803 Mips64Assembler* assembler = GetAssembler();
804 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
805 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
806
807 __ Sw(val, adr, 00);
808}
809
// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  // Raw-memory poke: address and value registers in, nothing out.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
814
815void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
816 Mips64Assembler* assembler = GetAssembler();
817 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
818 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
819
820 __ Sd(val, adr, 0);
821}
822
Chris Larsen49e55392015-09-04 16:04:03 -0700823// Thread java.lang.Thread.currentThread()
824void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100825 LocationSummary* locations =
826 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen49e55392015-09-04 16:04:03 -0700827 locations->SetOut(Location::RequiresRegister());
828}
829
830void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
831 Mips64Assembler* assembler = GetAssembler();
832 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
833
834 __ LoadFromOffset(kLoadUnsignedWord,
835 out,
836 TR,
837 Thread::PeerOffset<kMips64PointerSize>().Int32Value());
838}
839
// Locations for Unsafe.get*(Object o, long offset): unused receiver, base
// object register, 64-bit offset register, and a core-register result.
// `type` is the type of the loaded value; object gets under a (Baker) read
// barrier may call a slow path and then need an extra temp and an
// overlapping output.
static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
                                          HInvoke* invoke,
                                          DataType::Type type) {
  // Only the reference-loading variants can hit the read-barrier slow path.
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}
866
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits the load for Unsafe.get{Int,Long,Object}[Volatile]. For volatile
// variants a SYNC(0) barrier is emitted after the load to order it against
// subsequent accesses. Reference loads go through the configured read-barrier
// machinery when the compiler emits read barriers.
static void GenUnsafeGet(HInvoke* invoke,
                         DataType::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference)) << type;
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  GpuRegister trg = trg_loc.AsRegister<GpuRegister>();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  GpuRegister base = base_loc.AsRegister<GpuRegister>();
  // Long offset.
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();

  // TMP = base + offset. Skipped on the Baker reference path, which is
  // handed (base, index) separately and forms the address itself.
  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == DataType::Type::kReference))) {
    __ Daddu(TMP, base, offset);
  }

  switch (type) {
    case DataType::Type::kInt64:
      __ Ld(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kInt32:
      __ Lw(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kReference:
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          // Fast-path Baker read barrier: load and mark in one helper.
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset */ 0U,
                                                             /* index */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          // Non-Baker read barrier: plain load, then the slow-path fixup.
          __ Lwu(trg, TMP, 0);
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset */ 0U,
                                           /* index */ offset_loc);
        }
      } else {
        // No read barriers: load and (if poisoning is on) unpoison.
        __ Lwu(trg, TMP, 0);
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
948
// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // Plain (non-volatile) 32-bit Unsafe load.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
953
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // Non-volatile 32-bit load; no memory barriers emitted.
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ false, codegen_);
}
957
// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // Volatile 32-bit Unsafe load; same locations as the plain variant.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
962
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // Volatile 32-bit load; GenUnsafeGet adds the barrier after the load.
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ true, codegen_);
}
966
// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // Plain 64-bit Unsafe load.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
971
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // Non-volatile 64-bit load; no memory barriers emitted.
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ false, codegen_);
}
975
// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit Unsafe load.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
980
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit load; GenUnsafeGet adds the barrier after the load.
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ true, codegen_);
}
984
// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Reference load; may get read-barrier temps/slow-path locations.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
989
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Non-volatile reference load, routed through the read-barrier machinery.
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ false, codegen_);
}
993
// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Volatile reference load; may get read-barrier temps/slow-path locations.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
998
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Volatile reference load; GenUnsafeGet adds the barrier after the load.
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ true, codegen_);
}
1002
Vladimir Markoca6fff82017-10-03 14:49:14 +01001003static void CreateIntIntIntIntToVoid(ArenaAllocator* allocator, HInvoke* invoke) {
1004 LocationSummary* locations =
1005 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen1360ada2015-09-04 23:38:16 -07001006 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1007 locations->SetInAt(1, Location::RequiresRegister());
1008 locations->SetInAt(2, Location::RequiresRegister());
1009 locations->SetInAt(3, Location::RequiresRegister());
1010}
1011
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits the store for Unsafe.put{Int,Long,Object}[Ordered|Volatile]. A SYNC(0)
// before the store orders prior accesses (ordered and volatile variants); a
// second SYNC(0) after the store is added for volatile only. Reference stores
// also mark the GC card for the base object.
static void GenUnsafePut(LocationSummary* locations,
                         DataType::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
      if (kPoisonHeapReferences && type == DataType::Type::kReference) {
        // Poison into AT so `value` stays intact for the card marking below.
        __ PoisonHeapReference(AT, value);
        __ Sw(AT, TMP, 0);
      } else {
        __ Sw(value, TMP, 0);
      }
      break;

    case DataType::Type::kInt64:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    __ Sync(0);
  }

  if (type == DataType::Type::kReference) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
1061
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // Plain 32-bit Unsafe store.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1066
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // Plain 32-bit store: no fences, no card marking.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1074
// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // Ordered 32-bit Unsafe store; same locations as the plain variant.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1079
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // Ordered store: fence before the store only.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1087
// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  // Volatile 32-bit Unsafe store.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1092
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  // Volatile store: fences both before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1100
// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  // Plain reference Unsafe store.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1105
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  // Reference store: no fences; GenUnsafePut marks the GC card.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1113
// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  // Ordered reference Unsafe store.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1118
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  // Ordered reference store: fence before the store; GC card marked.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1126
// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  // Volatile reference Unsafe store.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1131
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  // Volatile reference store: fences before and after; GC card marked.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1139
// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  // Plain 64-bit Unsafe store.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1144
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  // Plain 64-bit store: no fences, no card marking.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1152
// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  // Ordered 64-bit Unsafe store.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1157
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  // Ordered 64-bit store: fence before the store only.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
1165
// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit Unsafe store.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1170
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit store: fences before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1178
// Locations for Unsafe.compareAndSwap*(Object o, long offset, expected, x):
// unused receiver, base object, offset, expected value, new value; boolean
// result in a core register. Only the object-flavored CAS under a Baker read
// barrier can call a slow path, in which case it also gets a temp.
static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* allocator, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      kUseBakerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary register used in CAS by (Baker) read barrier.
  if (can_call) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
1201
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits a compare-and-swap as an LL/SC retry loop bracketed by SYNC(0)
// barriers. `out` receives 1 if the swap happened, 0 if the current value
// did not match `expected`.
static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorMIPS64* codegen) {
  Mips64Assembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
  GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();

  // `out` is reused as scratch inside the loop, so it must not alias the
  // inputs that are still live across iterations.
  DCHECK_NE(base, out);
  DCHECK_NE(offset, out);
  DCHECK_NE(expected, out);

  if (type == DataType::Type::kReference) {
    // The only read barrier implementation supporting the
    // UnsafeCASObject intrinsic is the Baker-style read barriers.
    DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      Location temp = locations->GetTemp(0);
      // Need to make sure the reference stored in the field is a to-space
      // one before attempting the CAS or the CAS could fail incorrectly.
      codegen->GenerateReferenceLoadWithBakerReadBarrier(
          invoke,
          out_loc,  // Unused, used only as a "temporary" within the read barrier.
          base,
          /* offset */ 0u,
          /* index */ offset_loc,
          ScaleFactor::TIMES_1,
          temp,
          /* needs_null_check */ false,
          /* always_update_field */ true);
    }
  }

  Mips64Label loop_head, exit_loop;
  __ Daddu(TMP, base, offset);

  if (kPoisonHeapReferences && type == DataType::Type::kReference) {
    // Compare/store must be done on poisoned representations.
    __ PoisonHeapReference(expected);
    // Do not poison `value`, if it is the same register as
    // `expected`, which has just been poisoned.
    if (value != expected) {
      __ PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  __ Sync(0);
  __ Bind(&loop_head);
  if (type == DataType::Type::kInt64) {
    __ Lld(out, TMP);
  } else {
    // Note: We will need a read barrier here, when read barrier
    // support is added to the MIPS64 back end.
    __ Ll(out, TMP);
    if (type == DataType::Type::kReference) {
      // The LL instruction sign-extends the 32-bit value, but
      // 32-bit references must be zero-extended. Zero-extend `out`.
      __ Dext(out, out, 0, 32);
    }
  }
  __ Dsubu(out, out, expected);  // If we didn't get the 'expected'
  __ Sltiu(out, out, 1);         // value, set 'out' to false, and
  __ Beqzc(out, &exit_loop);     // return.
  __ Move(out, value);  // Use 'out' for the 'store conditional' instruction.
                        // If we use 'value' directly, we would lose 'value'
                        // in the case that the store fails. Whether the
                        // store succeeds, or fails, it will load the
                        // correct Boolean value into the 'out' register.
  if (type == DataType::Type::kInt64) {
    __ Scd(out, TMP);
  } else {
    __ Sc(out, TMP);
  }
  __ Beqzc(out, &loop_head);  // If we couldn't do the read-modify-write
                              // cycle atomically then retry.
  __ Bind(&exit_loop);
  __ Sync(0);

  if (kPoisonHeapReferences && type == DataType::Type::kReference) {
    // Restore caller-visible (unpoisoned) input registers.
    __ UnpoisonHeapReference(expected);
    // Do not unpoison `value`, if it is the same register as
    // `expected`, which has just been unpoisoned.
    if (value != expected) {
      __ UnpoisonHeapReference(value);
    }
  }
}
1304
// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
// Sets up the register locations (inputs, output, temps) for the int CAS intrinsic.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1309
// Emits the compare-and-swap sequence for a 32-bit int field via the shared GenCas helper.
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke, DataType::Type::kInt32, codegen_);
}
1313
// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
// Sets up the register locations (inputs, output, temps) for the long CAS intrinsic.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1318
// Emits the compare-and-swap sequence for a 64-bit long field via the shared GenCas helper.
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke, DataType::Type::kInt64, codegen_);
}
1322
1323// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
1324void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001325 // The only read barrier implementation supporting the
1326 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1327 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1328 return;
1329 }
1330
Vladimir Markoca6fff82017-10-03 14:49:14 +01001331 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001332}
1333
// Emits the compare-and-swap sequence for a reference field via the shared GenCas helper.
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  GenCas(invoke, DataType::Type::kReference, codegen_);
}
1341
Chris Larsen9701c2e2015-09-04 17:22:47 -07001342// int java.lang.String.compareTo(String anotherString)
1343void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001344 LocationSummary* locations = new (allocator_) LocationSummary(
1345 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001346 InvokeRuntimeCallingConvention calling_convention;
1347 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1348 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001349 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001350 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1351}
1352
// Generates code for String.compareTo(): a null argument is diverted to the
// slow path; the main path invokes the kQuickStringCompareTo entrypoint.
void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // Null argument: take the slow path instead of calling the entrypoint.
  __ Beqzc(argument, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
  __ Bind(slow_path->GetExitLabel());
}
1369
Chris Larsen972d6d72015-10-20 11:29:12 -07001370// boolean java.lang.String.equals(Object anObject)
1371void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001372 LocationSummary* locations =
1373 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen972d6d72015-10-20 11:29:12 -07001374 locations->SetInAt(0, Location::RequiresRegister());
1375 locations->SetInAt(1, Location::RequiresRegister());
1376 locations->SetOut(Location::RequiresRegister());
1377
1378 // Temporary registers to store lengths of strings and for calculations.
1379 locations->AddTemp(Location::RequiresRegister());
1380 locations->AddTemp(Location::RequiresRegister());
1381 locations->AddTemp(Location::RequiresRegister());
1382}
1383
// Generates inline code for String.equals(): short-circuits on identity and
// null, checks the argument's class and the `count` fields (length plus
// compression flag), then compares the character data 8 bytes at a time.
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqzc(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Lw(temp1, str, class_offset);
    __ Lw(temp2, arg, class_offset);
    __ Bnec(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal, return false if they're not.
  // Also compares the compression style, if differs return false.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  if (mirror::kUseStringCompression) {
    // For string compression, calculate the number of bytes to compare (not chars).
    __ Dext(temp2, temp1, 0, 1);   // Extract compression flag.
    __ Srl(temp1, temp1, 1);       // Extract length.
    __ Sllv(temp1, temp1, temp2);  // Double the byte count if uncompressed.
  }

  // Loop to compare strings 8 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  // With string compression, we have compared 8 bytes, otherwise 4 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -8 : -4);
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1486
// Common code for String.indexOf(int) and String.indexOf(int, int).
// Code points outside the 16-bit char range are sent to the slow path
// (the full method handles them); otherwise the kQuickIndexOf runtime
// stub performs the search. When `start_at_zero` is true the start-index
// argument register is cleared before the call.
static void GenerateStringIndexOf(HInvoke* invoke,
                                  Mips64Assembler* assembler,
                                  CodeGeneratorMIPS64* codegen,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  // With start_at_zero, temp 0 was allocated by the locations builder and
  // doubles as the zeroed start-index argument; otherwise TMP suffices
  // for the code point comparison.
  GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
  SlowPathCodeMIPS64* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
      // Always needs the slow-path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow-path down and branch unconditionally.
      slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
      codegen->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != DataType::Type::kUint16) {
    // Runtime check: a value above the char range must take the slow path.
    GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel());  // UTF-16 required
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  }

  codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
1533
1534// int java.lang.String.indexOf(int ch)
1535void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001536 LocationSummary* locations = new (allocator_) LocationSummary(
1537 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001538 // We have a hand-crafted assembly stub that follows the runtime
1539 // calling convention. So it's best to align the inputs accordingly.
1540 InvokeRuntimeCallingConvention calling_convention;
1541 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1542 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001543 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001544 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1545
1546 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1547 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1548}
1549
// indexOf(int ch) searches from the beginning of the string.
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ true);
}
1553
1554// int java.lang.String.indexOf(int ch, int fromIndex)
1555void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001556 LocationSummary* locations = new (allocator_) LocationSummary(
1557 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001558 // We have a hand-crafted assembly stub that follows the runtime
1559 // calling convention. So it's best to align the inputs accordingly.
1560 InvokeRuntimeCallingConvention calling_convention;
1561 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1562 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1563 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001564 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001565 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1566}
1567
// indexOf(int ch, int fromIndex) keeps the caller-supplied start index.
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ false);
}
1571
Roland Levillaincc3839c2016-02-29 16:23:48 +00001572// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001573void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001574 LocationSummary* locations = new (allocator_) LocationSummary(
1575 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001576 InvokeRuntimeCallingConvention calling_convention;
1577 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1578 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1579 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1580 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001581 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001582 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1583}
1584
// Generates code for StringFactory.newStringFromBytes(): a null byte array
// is diverted to the slow path; the main path calls the
// kQuickAllocStringFromBytes entrypoint.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // Null data array: take the slow path instead of calling the entrypoint.
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Bind(slow_path->GetExitLabel());
}
1599
Roland Levillaincc3839c2016-02-29 16:23:48 +00001600// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001601void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001602 LocationSummary* locations =
1603 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001604 InvokeRuntimeCallingConvention calling_convention;
1605 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1606 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1607 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001608 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001609 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1610}
1611
// Generates code for StringFactory.newStringFromChars(): a direct call to
// the kQuickAllocStringFromChars entrypoint.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
1622
Roland Levillainf969a202016-03-09 16:14:00 +00001623// java.lang.StringFactory.newStringFromString(String toCopy)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001624void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001625 LocationSummary* locations = new (allocator_) LocationSummary(
1626 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001627 InvokeRuntimeCallingConvention calling_convention;
1628 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001629 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001630 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1631}
1632
// Generates code for StringFactory.newStringFromString(): a null source
// string is diverted to the slow path; the main path calls the
// kQuickAllocStringFromString entrypoint.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // Null source string: take the slow path instead of calling the entrypoint.
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Bind(slow_path->GetExitLabel());
}
1647
Chris Larsenddec7f92016-02-16 12:35:04 -08001648static void GenIsInfinite(LocationSummary* locations,
1649 bool is64bit,
1650 Mips64Assembler* assembler) {
1651 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
1652 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1653
1654 if (is64bit) {
1655 __ ClassD(FTMP, in);
1656 } else {
1657 __ ClassS(FTMP, in);
1658 }
1659 __ Mfc1(out, FTMP);
1660 __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
1661 __ Sltu(out, ZERO, out);
1662}
1663
// boolean java.lang.Float.isInfinite(float)
// FP input in, int (boolean) result out.
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}
1668
// Single-precision variant of the shared isInfinite generator.
void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
1672
// boolean java.lang.Double.isInfinite(double)
// FP input in, int (boolean) result out.
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}
1677
// Double-precision variant of the shared isInfinite generator.
void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
1681
Chris Larsene3660592016-11-09 11:13:42 -08001682// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
1683void IntrinsicLocationsBuilderMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001684 LocationSummary* locations =
1685 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsene3660592016-11-09 11:13:42 -08001686 locations->SetInAt(0, Location::RequiresRegister());
1687 locations->SetInAt(1, Location::RequiresRegister());
1688 locations->SetInAt(2, Location::RequiresRegister());
1689 locations->SetInAt(3, Location::RequiresRegister());
1690 locations->SetInAt(4, Location::RequiresRegister());
1691
Chris Larsen366d4332017-03-23 09:02:56 -07001692 locations->AddTemp(Location::RequiresRegister());
1693 locations->AddTemp(Location::RequiresRegister());
1694 locations->AddTemp(Location::RequiresRegister());
Chris Larsene3660592016-11-09 11:13:42 -08001695}
1696
// Generates inline code for String.getChars() with all bounds checks already
// done by the caller ("NoCheck"): copies [srcBegin, srcEnd) of the string's
// chars into dst starting at dstBegin, handling both compressed (8-bit) and
// uncompressed (16-bit) source strings.
void IntrinsicCodeGeneratorMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  GpuRegister srcObj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister srcBegin = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister srcEnd = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister dstObj = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister dstBegin = locations->InAt(4).AsRegister<GpuRegister>();

  GpuRegister dstPtr = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister srcPtr = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister numChrs = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label done;
  Mips64Label loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beqc(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  __ Dsubu(numChrs, srcEnd, srcBegin);

  // Calculate destination address.
  __ Daddiu(dstPtr, dstObj, data_offset);
  __ Dlsa(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    Mips64Label uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag.
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Dext(TMP, TMP, 0, 1);

    // If string is uncompressed, use uncompressed path.
    __ Bnezc(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    __ Daddu(srcPtr, srcObj, srcBegin);
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Daddiu(numChrs, numChrs, -1);
    __ Daddiu(srcPtr, srcPtr, 1);
    __ Daddiu(dstPtr, dstPtr, 2);
    __ Bnezc(numChrs, &compressed_loop);

    __ Bc(&done);
    __ Bind(&uncompressed_copy);
  }

  // Calculate source address.
  __ Daddiu(srcPtr, srcObj, value_offset);
  __ Dlsa(srcPtr, srcBegin, srcPtr, char_shift);

  // Copy loop for uncompressed strings: one 16-bit char per iteration.
  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Daddiu(numChrs, numChrs, -1);
  __ Daddiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Daddiu(dstPtr, dstPtr, char_size);
  __ Bnezc(numChrs, &loop);

  __ Bind(&done);
}
1772
Chris Larsen5863f852017-03-23 15:41:37 -07001773// static void java.lang.System.arraycopy(Object src, int srcPos,
1774// Object dest, int destPos,
1775// int length)
1776void IntrinsicLocationsBuilderMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
1777 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
1778 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
1779 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
1780
1781 // As long as we are checking, we might as well check to see if the src and dest
1782 // positions are >= 0.
1783 if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
1784 (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
1785 // We will have to fail anyways.
1786 return;
1787 }
1788
1789 // And since we are already checking, check the length too.
1790 if (length != nullptr) {
1791 int32_t len = length->GetValue();
1792 if (len < 0) {
1793 // Just call as normal.
1794 return;
1795 }
1796 }
1797
1798 // Okay, it is safe to generate inline code.
1799 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001800 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
Chris Larsen5863f852017-03-23 15:41:37 -07001801 // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
1802 locations->SetInAt(0, Location::RequiresRegister());
1803 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1804 locations->SetInAt(2, Location::RequiresRegister());
1805 locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
1806 locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));
1807
1808 locations->AddTemp(Location::RequiresRegister());
1809 locations->AddTemp(Location::RequiresRegister());
1810 locations->AddTemp(Location::RequiresRegister());
1811}
1812
1813// Utility routine to verify that "length(input) - pos >= length"
1814static void EnoughItems(Mips64Assembler* assembler,
1815 GpuRegister length_input_minus_pos,
1816 Location length,
1817 SlowPathCodeMIPS64* slow_path) {
1818 if (length.IsConstant()) {
1819 int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();
1820
1821 if (IsInt<16>(length_constant)) {
1822 __ Slti(TMP, length_input_minus_pos, length_constant);
1823 __ Bnezc(TMP, slow_path->GetEntryLabel());
1824 } else {
1825 __ LoadConst32(TMP, length_constant);
1826 __ Bltc(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
1827 }
1828 } else {
1829 __ Bltc(length_input_minus_pos, length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
1830 }
1831}
1832
// Emits range checks for one side of an arraycopy: verifies that `pos` >= 0
// and that "length(input) - pos >= length", branching to `slow_path` when a
// check fails. When `length_is_input_length` is true the caller guarantees
// that `length` equals length(input), so the checks reduce to pos == 0.
// Clobbers AT (and TMP via EnoughItems).
static void CheckPosition(Mips64Assembler* assembler,
                          Location pos,
                          GpuRegister input,
                          Location length,
                          SlowPathCodeMIPS64* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
      // Otherwise: pos == 0 and length == length(input), nothing to check.
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);  // Negative constants were rejected by the LocationsBuilder.
      __ Addiu32(AT, AT, -pos_const);
      __ Bltzc(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bnezc(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bltzc(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltzc(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}
1879
// Emits an inline element-by-element char[] copy. Conditions the inline code
// cannot handle (same array, null arrays, out-of-range positions or negative
// length) branch to a slow path that performs the call the regular way.
void IntrinsicCodeGeneratorMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
  Location src_pos = locations->InAt(1);
  GpuRegister dest = locations->InAt(2).AsRegister<GpuRegister>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  Mips64Label loop;

  GpuRegister dest_base = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister src_base = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister count = locations->GetTemp(2).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beqc(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqzc(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqzc(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltzc(length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<GpuRegister>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqzc(count, slow_path->GetExitLabel());

  // Okay, everything checks out.  Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Daddiu64(src_base, src, data_offset, TMP);
    // src_base += src_pos << char_shift.
    __ Dlsa(src_base, src_pos.AsRegister<GpuRegister>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Daddiu64(dest_base, dest, data_offset, TMP);
    __ Dlsa(dest_base, dest_pos.AsRegister<GpuRegister>(), dest_base, char_shift);
  }

  // Copy one 16-bit char per iteration; count was checked non-zero above.
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Daddiu(src_base, src_base, char_size);
  __ Daddiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Daddiu(dest_base, dest_base, char_size);
  __ Bnezc(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}
1966
Chris Larsenab922502016-04-15 10:00:56 -07001967static void GenHighestOneBit(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001968 DataType::Type type,
Chris Larsenab922502016-04-15 10:00:56 -07001969 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001970 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Chris Larsenab922502016-04-15 10:00:56 -07001971
1972 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
1973 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1974
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001975 if (type == DataType::Type::kInt64) {
Chris Larsenab922502016-04-15 10:00:56 -07001976 __ Dclz(TMP, in);
1977 __ LoadConst64(AT, INT64_C(0x8000000000000000));
Chris Larsen68db2a92016-09-14 15:41:29 -07001978 __ Dsrlv(AT, AT, TMP);
Chris Larsenab922502016-04-15 10:00:56 -07001979 } else {
1980 __ Clz(TMP, in);
1981 __ LoadConst32(AT, 0x80000000);
Chris Larsen68db2a92016-09-14 15:41:29 -07001982 __ Srlv(AT, AT, TMP);
Chris Larsenab922502016-04-15 10:00:56 -07001983 }
1984 // For either value of "type", when "in" is zero, "out" should also
1985 // be zero. Without this extra "and" operation, when "in" is zero,
1986 // "out" would be either Integer.MIN_VALUE, or Long.MIN_VALUE because
1987 // the MIPS logical shift operations "dsrlv", and "srlv" don't use
1988 // the shift amount (TMP) directly; they use either (TMP % 64) or
1989 // (TMP % 32), respectively.
Chris Larsen68db2a92016-09-14 15:41:29 -07001990 __ And(out, AT, in);
Chris Larsenab922502016-04-15 10:00:56 -07001991}
1992
// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  // Standard int-in/int-out register locations.
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  // Emit the shared highest-one-bit sequence for a 32-bit input.
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}
2001
// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  // Standard int-in/int-out register locations.
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  // Emit the shared highest-one-bit sequence for a 64-bit input.
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2010
2011static void GenLowestOneBit(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002012 DataType::Type type,
Chris Larsenab922502016-04-15 10:00:56 -07002013 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002014 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Chris Larsenab922502016-04-15 10:00:56 -07002015
2016 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
2017 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2018
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002019 if (type == DataType::Type::kInt64) {
Chris Larsenab922502016-04-15 10:00:56 -07002020 __ Dsubu(TMP, ZERO, in);
2021 } else {
2022 __ Subu(TMP, ZERO, in);
2023 }
2024 __ And(out, TMP, in);
2025}
2026
// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  // Standard int-in/int-out register locations.
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  // Emit the shared lowest-one-bit sequence for a 32-bit input.
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}
2035
// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  // Standard int-in/int-out register locations.
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  // Emit the shared lowest-one-bit sequence for a 64-bit input.
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2044
Vladimir Markoca6fff82017-10-03 14:49:14 +01002045static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
2046 LocationSummary* locations =
2047 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen74c20582017-03-28 22:17:35 -07002048 InvokeRuntimeCallingConvention calling_convention;
2049
2050 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002051 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
Chris Larsen74c20582017-03-28 22:17:35 -07002052}
2053
Vladimir Markoca6fff82017-10-03 14:49:14 +01002054static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
2055 LocationSummary* locations =
2056 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen74c20582017-03-28 22:17:35 -07002057 InvokeRuntimeCallingConvention calling_convention;
2058
2059 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2060 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002061 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
Chris Larsen74c20582017-03-28 22:17:35 -07002062}
2063
2064static void GenFPToFPCall(HInvoke* invoke,
2065 CodeGeneratorMIPS64* codegen,
2066 QuickEntrypointEnum entry) {
2067 LocationSummary* locations = invoke->GetLocations();
2068 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
2069 DCHECK_EQ(in, F12);
2070 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2071 DCHECK_EQ(out, F0);
2072
2073 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2074}
2075
2076static void GenFPFPToFPCall(HInvoke* invoke,
2077 CodeGeneratorMIPS64* codegen,
2078 QuickEntrypointEnum entry) {
2079 LocationSummary* locations = invoke->GetLocations();
2080 FpuRegister in0 = locations->InAt(0).AsFpuRegister<FpuRegister>();
2081 DCHECK_EQ(in0, F12);
2082 FpuRegister in1 = locations->InAt(1).AsFpuRegister<FpuRegister>();
2083 DCHECK_EQ(in1, F13);
2084 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2085 DCHECK_EQ(out, F0);
2086
2087 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2088}
2089
// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCos(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCos(HInvoke* invoke) {
  // Delegate to the quick runtime's cos entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}
2098
// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathSin(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSin(HInvoke* invoke) {
  // Delegate to the quick runtime's sin entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}
2107
// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAcos(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAcos(HInvoke* invoke) {
  // Delegate to the quick runtime's acos entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}
2116
// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAsin(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAsin(HInvoke* invoke) {
  // Delegate to the quick runtime's asin entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}
2125
// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan(HInvoke* invoke) {
  // Delegate to the quick runtime's atan entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}
2134
// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan2(HInvoke* invoke) {
  // Two FP arguments, FP result, via a runtime call.
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan2(HInvoke* invoke) {
  // Delegate to the quick runtime's atan2 entrypoint.
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}
2143
// static double java.lang.Math.pow(double a, double b)
void IntrinsicLocationsBuilderMIPS64::VisitMathPow(HInvoke* invoke) {
  // Two FP arguments, FP result, via a runtime call.
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathPow(HInvoke* invoke) {
  // Delegate to the quick runtime's pow entrypoint.
  GenFPFPToFPCall(invoke, codegen_, kQuickPow);
}
2152
// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCbrt(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCbrt(HInvoke* invoke) {
  // Delegate to the quick runtime's cbrt entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}
2161
// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathCosh(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCosh(HInvoke* invoke) {
  // Delegate to the quick runtime's cosh entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}
2170
// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathExp(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExp(HInvoke* invoke) {
  // Delegate to the quick runtime's exp entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}
2179
// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathExpm1(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExpm1(HInvoke* invoke) {
  // Delegate to the quick runtime's expm1 entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}
2188
// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathHypot(HInvoke* invoke) {
  // Two FP arguments, FP result, via a runtime call.
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathHypot(HInvoke* invoke) {
  // Delegate to the quick runtime's hypot entrypoint.
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}
2197
// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog(HInvoke* invoke) {
  // Delegate to the quick runtime's log entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}
2206
// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog10(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog10(HInvoke* invoke) {
  // Delegate to the quick runtime's log10 entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}
2215
// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  // Two FP arguments, FP result, via a runtime call.
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  // Delegate to the quick runtime's nextafter entrypoint.
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}
2224
// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathSinh(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSinh(HInvoke* invoke) {
  // Delegate to the quick runtime's sinh entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}
2233
// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathTan(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTan(HInvoke* invoke) {
  // Delegate to the quick runtime's tan entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}
2242
// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathTanh(HInvoke* invoke) {
  // One FP argument, FP result, via a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTanh(HInvoke* invoke) {
  // Delegate to the quick runtime's tanh entrypoint.
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}
2251
// static java.lang.Integer java.lang.Integer.valueOf(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  // The shared helper decides whether intrinsifying is possible and sets up
  // the locations: a reference result in the runtime return location, and the
  // first runtime argument register as a temp.
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(DataType::Type::kReference),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
2261
// Generates Integer.valueOf(int): values in [info.low, info.low + info.length)
// are served from the boot-image cache of boxed Integers; anything else gets a
// freshly allocated j.l.Integer.
void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info =
      IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions());
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  InstructionCodeGeneratorMIPS64* icodegen =
      down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (static_cast<uint32_t>(value - info.low) < info.length) {
      // Just embed the j.l.Integer in the code.
      DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference);
      codegen_->LoadBootImageAddress(out, info.value_boot_image_reference);
    } else {
      DCHECK(locations->CanCall());
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
                                             info.integer_boot_image_offset);
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    DCHECK(locations->CanCall());
    GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
    Mips64Label allocate, done;

    // out = in - info.low, i.e. the index into the cache array.
    __ Addiu32(out, in, -info.low);
    // Is out < info.length, comparing as unsigned quantities?
    __ LoadConst32(AT, info.length);
    // Branch if out >= info.length. This means that "in" is outside of the valid range.
    __ Bgeuc(out, AT, &allocate);

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    codegen_->LoadBootImageAddress(TMP, info.array_data_boot_image_reference);
    __ Dlsa(out, out, TMP, TIMES_4);
    __ Lwu(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ Bc(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
                                           info.integer_boot_image_offset);
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
2318
// static boolean java.lang.Thread.interrupted()
void IntrinsicLocationsBuilderMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  // No call is made; only a register for the boolean result is needed.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
  // Read the interrupted flag from the current Thread (register TR).
  int32_t offset = Thread::InterruptedOffset<kMips64PointerSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  Mips64Label done;
  // If the flag is clear there is nothing to reset; skip the barriers.
  __ Beqzc(out, &done);
  // Clear the flag between two SYNC barriers so the read-and-clear is not
  // reordered with surrounding memory accesses.
  __ Sync(0);
  __ StoreToOffset(kStoreWord, ZERO, TR, offset);
  __ Sync(0);
  __ Bind(&done);
}
2338
// static void java.lang.ref.Reference.reachabilityFence(Object ref)
void IntrinsicLocationsBuilderMIPS64::VisitReachabilityFence(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  // Any location will do; the point is only to keep the reference live.
  locations->SetInAt(0, Location::Any());
}

// No code is emitted: holding the input in the location summary is enough to
// keep the reference alive across this point.
void IntrinsicCodeGeneratorMIPS64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
2346
Aart Bik2f9fcc92016-03-01 15:16:54 -08002347UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08002348UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)
xueliang.zhongcb58b072017-10-13 12:06:56 +01002349UNIMPLEMENTED_INTRINSIC(MIPS64, CRC32Update)
Aart Bik3f67e692016-01-15 14:35:12 -08002350
Aart Bikff7d89c2016-11-07 08:49:28 -08002351UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf);
2352UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter);
Aart Bik71bf7b42016-11-16 10:17:46 -08002353UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend);
2354UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength);
2355UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString);
2356UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppend);
2357UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength);
2358UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString);
Aart Bikff7d89c2016-11-07 08:49:28 -08002359
Aart Bik0e54c012016-03-04 12:08:31 -08002360// 1.8.
2361UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
2362UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
2363UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
2364UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
2365UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08002366
Aart Bik2f9fcc92016-03-01 15:16:54 -08002367UNREACHABLE_INTRINSICS(MIPS64)
Chris Larsen3039e382015-08-26 07:54:08 -07002368
2369#undef __
2370
2371} // namespace mips64
2372} // namespace art