/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "intrinsics_mips.h"
18
19#include "arch/mips/instruction_set_features_mips.h"
20#include "art_method.h"
21#include "code_generator_mips.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "intrinsics.h"
24#include "mirror/array-inl.h"
25#include "mirror/string.h"
26#include "thread.h"
27#include "utils/mips/assembler_mips.h"
28#include "utils/mips/constants_mips.h"
29
30namespace art {
31
32namespace mips {
33
34IntrinsicLocationsBuilderMIPS::IntrinsicLocationsBuilderMIPS(CodeGeneratorMIPS* codegen)
35 : arena_(codegen->GetGraph()->GetArena()) {
36}
37
38MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() {
39 return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler());
40}
41
42ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() {
43 return codegen_->GetGraph()->GetArena();
44}
45
Chris Larsene16ce5a2015-11-18 12:30:20 -080046inline bool IntrinsicCodeGeneratorMIPS::IsR2OrNewer() {
47 return codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
48}
49
50inline bool IntrinsicCodeGeneratorMIPS::IsR6() {
51 return codegen_->GetInstructionSetFeatures().IsR6();
52}
53
Chris Larsen701566a2015-10-27 15:29:13 -070054#define __ codegen->GetAssembler()->
55
56static void MoveFromReturnRegister(Location trg,
57 Primitive::Type type,
58 CodeGeneratorMIPS* codegen) {
59 if (!trg.IsValid()) {
60 DCHECK_EQ(type, Primitive::kPrimVoid);
61 return;
62 }
63
64 DCHECK_NE(type, Primitive::kPrimVoid);
65
66 if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
67 Register trg_reg = trg.AsRegister<Register>();
68 if (trg_reg != V0) {
69 __ Move(V0, trg_reg);
70 }
71 } else {
72 FRegister trg_reg = trg.AsFpuRegister<FRegister>();
73 if (trg_reg != F0) {
74 if (type == Primitive::kPrimFloat) {
75 __ MovS(F0, trg_reg);
76 } else {
77 __ MovD(F0, trg_reg);
78 }
79 }
80 }
81}
82
83static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
84 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
85 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
86}
87
88// Slow-path for fallback (calling the managed code to handle the
89// intrinsic) in an intrinsified call. This will copy the arguments
90// into the positions for a regular call.
91//
92// Note: The actual parameters are required to be in the locations
93// given by the invoke's location summary. If an intrinsic
94// modifies those locations before a slowpath call, they must be
95// restored!
96class IntrinsicSlowPathMIPS : public SlowPathCodeMIPS {
97 public:
98 explicit IntrinsicSlowPathMIPS(HInvoke* invoke) : invoke_(invoke) { }
99
100 void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
101 CodeGeneratorMIPS* codegen = down_cast<CodeGeneratorMIPS*>(codegen_in);
102
103 __ Bind(GetEntryLabel());
104
105 SaveLiveRegisters(codegen, invoke_->GetLocations());
106
107 MoveArguments(invoke_, codegen);
108
109 if (invoke_->IsInvokeStaticOrDirect()) {
110 codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
111 Location::RegisterLocation(A0));
112 codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
113 } else {
114 UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
115 UNREACHABLE();
116 }
117
118 // Copy the result back to the expected output.
119 Location out = invoke_->GetLocations()->Out();
120 if (out.IsValid()) {
121 DCHECK(out.IsRegister()); // TODO: Replace this when we support output in memory.
122 DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
123 MoveFromReturnRegister(out, invoke_->GetType(), codegen);
124 }
125
126 RestoreLiveRegisters(codegen, invoke_->GetLocations());
127 __ B(GetExitLabel());
128 }
129
130 const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS"; }
131
132 private:
133 // The instruction where this slow path is happening.
134 HInvoke* const invoke_;
135
136 DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS);
137};
138
139#undef __
140
141bool IntrinsicLocationsBuilderMIPS::TryDispatch(HInvoke* invoke) {
142 Dispatch(invoke);
143 LocationSummary* res = invoke->GetLocations();
144 return res != nullptr && res->Intrinsified();
145}
146
147#define __ assembler->
148
Chris Larsen3f8bf652015-10-28 10:08:56 -0700149static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
150 LocationSummary* locations = new (arena) LocationSummary(invoke,
151 LocationSummary::kNoCall,
152 kIntrinsified);
153 locations->SetInAt(0, Location::RequiresFpuRegister());
154 locations->SetOut(Location::RequiresRegister());
155}
156
157static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
158 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
159
160 if (is64bit) {
161 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
162 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
163
164 __ Mfc1(out_lo, in);
165 __ Mfhc1(out_hi, in);
166 } else {
167 Register out = locations->Out().AsRegister<Register>();
168
169 __ Mfc1(out, in);
170 }
171}
172
173// long java.lang.Double.doubleToRawLongBits(double)
174void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
175 CreateFPToIntLocations(arena_, invoke);
176}
177
178void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000179 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3f8bf652015-10-28 10:08:56 -0700180}
181
182// int java.lang.Float.floatToRawIntBits(float)
183void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
184 CreateFPToIntLocations(arena_, invoke);
185}
186
187void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000188 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3f8bf652015-10-28 10:08:56 -0700189}
190
191static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
192 LocationSummary* locations = new (arena) LocationSummary(invoke,
193 LocationSummary::kNoCall,
194 kIntrinsified);
195 locations->SetInAt(0, Location::RequiresRegister());
196 locations->SetOut(Location::RequiresFpuRegister());
197}
198
199static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
200 FRegister out = locations->Out().AsFpuRegister<FRegister>();
201
202 if (is64bit) {
203 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
204 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
205
206 __ Mtc1(in_lo, out);
207 __ Mthc1(in_hi, out);
208 } else {
209 Register in = locations->InAt(0).AsRegister<Register>();
210
211 __ Mtc1(in, out);
212 }
213}
214
215// double java.lang.Double.longBitsToDouble(long)
216void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
217 CreateIntToFPLocations(arena_, invoke);
218}
219
220void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000221 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3f8bf652015-10-28 10:08:56 -0700222}
223
224// float java.lang.Float.intBitsToFloat(int)
225void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
226 CreateIntToFPLocations(arena_, invoke);
227}
228
229void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000230 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3f8bf652015-10-28 10:08:56 -0700231}
232
Chris Larsen86829602015-11-18 12:27:52 -0800233static void CreateIntToIntLocations(ArenaAllocator* arena,
234 HInvoke* invoke,
235 Location::OutputOverlap overlaps = Location::kNoOutputOverlap) {
Chris Larsen3f8bf652015-10-28 10:08:56 -0700236 LocationSummary* locations = new (arena) LocationSummary(invoke,
237 LocationSummary::kNoCall,
238 kIntrinsified);
239 locations->SetInAt(0, Location::RequiresRegister());
Chris Larsen86829602015-11-18 12:27:52 -0800240 locations->SetOut(Location::RequiresRegister(), overlaps);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700241}
242
Chris Larsen70014c82015-11-18 12:26:08 -0800243static void GenReverse(LocationSummary* locations,
244 Primitive::Type type,
245 bool isR2OrNewer,
246 bool isR6,
247 bool reverseBits,
248 MipsAssembler* assembler) {
Chris Larsen3f8bf652015-10-28 10:08:56 -0700249 DCHECK(type == Primitive::kPrimShort ||
250 type == Primitive::kPrimInt ||
251 type == Primitive::kPrimLong);
Chris Larsen70014c82015-11-18 12:26:08 -0800252 DCHECK(type != Primitive::kPrimShort || !reverseBits);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700253
254 if (type == Primitive::kPrimShort) {
255 Register in = locations->InAt(0).AsRegister<Register>();
256 Register out = locations->Out().AsRegister<Register>();
257
258 if (isR2OrNewer) {
259 __ Wsbh(out, in);
260 __ Seh(out, out);
261 } else {
262 __ Sll(TMP, in, 24);
263 __ Sra(TMP, TMP, 16);
264 __ Sll(out, in, 16);
265 __ Srl(out, out, 24);
266 __ Or(out, out, TMP);
267 }
268 } else if (type == Primitive::kPrimInt) {
269 Register in = locations->InAt(0).AsRegister<Register>();
270 Register out = locations->Out().AsRegister<Register>();
271
272 if (isR2OrNewer) {
273 __ Rotr(out, in, 16);
274 __ Wsbh(out, out);
275 } else {
276 // MIPS32r1
277 // __ Rotr(out, in, 16);
278 __ Sll(TMP, in, 16);
279 __ Srl(out, in, 16);
280 __ Or(out, out, TMP);
281 // __ Wsbh(out, out);
282 __ LoadConst32(AT, 0x00FF00FF);
283 __ And(TMP, out, AT);
284 __ Sll(TMP, TMP, 8);
285 __ Srl(out, out, 8);
286 __ And(out, out, AT);
287 __ Or(out, out, TMP);
288 }
Chris Larsen70014c82015-11-18 12:26:08 -0800289 if (reverseBits) {
290 if (isR6) {
291 __ Bitswap(out, out);
292 } else {
293 __ LoadConst32(AT, 0x0F0F0F0F);
294 __ And(TMP, out, AT);
295 __ Sll(TMP, TMP, 4);
296 __ Srl(out, out, 4);
297 __ And(out, out, AT);
298 __ Or(out, TMP, out);
299 __ LoadConst32(AT, 0x33333333);
300 __ And(TMP, out, AT);
301 __ Sll(TMP, TMP, 2);
302 __ Srl(out, out, 2);
303 __ And(out, out, AT);
304 __ Or(out, TMP, out);
305 __ LoadConst32(AT, 0x55555555);
306 __ And(TMP, out, AT);
307 __ Sll(TMP, TMP, 1);
308 __ Srl(out, out, 1);
309 __ And(out, out, AT);
310 __ Or(out, TMP, out);
311 }
312 }
Chris Larsen3f8bf652015-10-28 10:08:56 -0700313 } else if (type == Primitive::kPrimLong) {
314 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
315 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
316 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
317 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
318
319 if (isR2OrNewer) {
320 __ Rotr(AT, in_hi, 16);
321 __ Rotr(TMP, in_lo, 16);
322 __ Wsbh(out_lo, AT);
323 __ Wsbh(out_hi, TMP);
324 } else {
325 // When calling CreateIntToIntLocations() we promised that the
326 // use of the out_lo/out_hi wouldn't overlap with the use of
327 // in_lo/in_hi. Be very careful not to write to out_lo/out_hi
328 // until we're completely done reading from in_lo/in_hi.
329 // __ Rotr(TMP, in_lo, 16);
330 __ Sll(TMP, in_lo, 16);
331 __ Srl(AT, in_lo, 16);
332 __ Or(TMP, TMP, AT); // Hold in TMP until it's safe
333 // to write to out_hi.
334 // __ Rotr(out_lo, in_hi, 16);
335 __ Sll(AT, in_hi, 16);
336 __ Srl(out_lo, in_hi, 16); // Here we are finally done reading
337 // from in_lo/in_hi so it's okay to
338 // write to out_lo/out_hi.
339 __ Or(out_lo, out_lo, AT);
340 // __ Wsbh(out_hi, out_hi);
341 __ LoadConst32(AT, 0x00FF00FF);
342 __ And(out_hi, TMP, AT);
343 __ Sll(out_hi, out_hi, 8);
344 __ Srl(TMP, TMP, 8);
345 __ And(TMP, TMP, AT);
346 __ Or(out_hi, out_hi, TMP);
347 // __ Wsbh(out_lo, out_lo);
348 __ And(TMP, out_lo, AT); // AT already holds the correct mask value
349 __ Sll(TMP, TMP, 8);
350 __ Srl(out_lo, out_lo, 8);
351 __ And(out_lo, out_lo, AT);
352 __ Or(out_lo, out_lo, TMP);
353 }
Chris Larsen70014c82015-11-18 12:26:08 -0800354 if (reverseBits) {
355 if (isR6) {
356 __ Bitswap(out_hi, out_hi);
357 __ Bitswap(out_lo, out_lo);
358 } else {
359 __ LoadConst32(AT, 0x0F0F0F0F);
360 __ And(TMP, out_hi, AT);
361 __ Sll(TMP, TMP, 4);
362 __ Srl(out_hi, out_hi, 4);
363 __ And(out_hi, out_hi, AT);
364 __ Or(out_hi, TMP, out_hi);
365 __ And(TMP, out_lo, AT);
366 __ Sll(TMP, TMP, 4);
367 __ Srl(out_lo, out_lo, 4);
368 __ And(out_lo, out_lo, AT);
369 __ Or(out_lo, TMP, out_lo);
370 __ LoadConst32(AT, 0x33333333);
371 __ And(TMP, out_hi, AT);
372 __ Sll(TMP, TMP, 2);
373 __ Srl(out_hi, out_hi, 2);
374 __ And(out_hi, out_hi, AT);
375 __ Or(out_hi, TMP, out_hi);
376 __ And(TMP, out_lo, AT);
377 __ Sll(TMP, TMP, 2);
378 __ Srl(out_lo, out_lo, 2);
379 __ And(out_lo, out_lo, AT);
380 __ Or(out_lo, TMP, out_lo);
381 __ LoadConst32(AT, 0x55555555);
382 __ And(TMP, out_hi, AT);
383 __ Sll(TMP, TMP, 1);
384 __ Srl(out_hi, out_hi, 1);
385 __ And(out_hi, out_hi, AT);
386 __ Or(out_hi, TMP, out_hi);
387 __ And(TMP, out_lo, AT);
388 __ Sll(TMP, TMP, 1);
389 __ Srl(out_lo, out_lo, 1);
390 __ And(out_lo, out_lo, AT);
391 __ Or(out_lo, TMP, out_lo);
392 }
393 }
Chris Larsen3f8bf652015-10-28 10:08:56 -0700394 }
395}
396
397// int java.lang.Integer.reverseBytes(int)
398void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
399 CreateIntToIntLocations(arena_, invoke);
400}
401
402void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
Chris Larsen70014c82015-11-18 12:26:08 -0800403 GenReverse(invoke->GetLocations(),
404 Primitive::kPrimInt,
Chris Larsene16ce5a2015-11-18 12:30:20 -0800405 IsR2OrNewer(),
406 IsR6(),
Chris Larsen70014c82015-11-18 12:26:08 -0800407 false,
408 GetAssembler());
Chris Larsen3f8bf652015-10-28 10:08:56 -0700409}
410
411// long java.lang.Long.reverseBytes(long)
412void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) {
413 CreateIntToIntLocations(arena_, invoke);
414}
415
416void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) {
Chris Larsen70014c82015-11-18 12:26:08 -0800417 GenReverse(invoke->GetLocations(),
418 Primitive::kPrimLong,
Chris Larsene16ce5a2015-11-18 12:30:20 -0800419 IsR2OrNewer(),
420 IsR6(),
Chris Larsen70014c82015-11-18 12:26:08 -0800421 false,
422 GetAssembler());
Chris Larsen3f8bf652015-10-28 10:08:56 -0700423}
424
425// short java.lang.Short.reverseBytes(short)
426void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) {
427 CreateIntToIntLocations(arena_, invoke);
428}
429
430void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) {
Chris Larsen70014c82015-11-18 12:26:08 -0800431 GenReverse(invoke->GetLocations(),
432 Primitive::kPrimShort,
Chris Larsene16ce5a2015-11-18 12:30:20 -0800433 IsR2OrNewer(),
434 IsR6(),
Chris Larsen70014c82015-11-18 12:26:08 -0800435 false,
436 GetAssembler());
437}
438
Chris Larsene3845472015-11-18 12:27:15 -0800439static void GenNumberOfLeadingZeroes(LocationSummary* locations,
440 bool is64bit,
441 bool isR6,
442 MipsAssembler* assembler) {
443 Register out = locations->Out().AsRegister<Register>();
444 if (is64bit) {
445 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
446 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
447
448 if (isR6) {
449 __ ClzR6(AT, in_hi);
450 __ ClzR6(TMP, in_lo);
451 __ Seleqz(TMP, TMP, in_hi);
452 } else {
453 __ ClzR2(AT, in_hi);
454 __ ClzR2(TMP, in_lo);
455 __ Movn(TMP, ZERO, in_hi);
456 }
457 __ Addu(out, AT, TMP);
458 } else {
459 Register in = locations->InAt(0).AsRegister<Register>();
460
461 if (isR6) {
462 __ ClzR6(out, in);
463 } else {
464 __ ClzR2(out, in);
465 }
466 }
467}
468
469// int java.lang.Integer.numberOfLeadingZeros(int i)
470void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
471 CreateIntToIntLocations(arena_, invoke);
472}
473
474void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Chris Larsene16ce5a2015-11-18 12:30:20 -0800475 GenNumberOfLeadingZeroes(invoke->GetLocations(), false, IsR6(), GetAssembler());
Chris Larsene3845472015-11-18 12:27:15 -0800476}
477
478// int java.lang.Long.numberOfLeadingZeros(long i)
479void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
480 CreateIntToIntLocations(arena_, invoke);
481}
482
483void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Chris Larsene16ce5a2015-11-18 12:30:20 -0800484 GenNumberOfLeadingZeroes(invoke->GetLocations(), true, IsR6(), GetAssembler());
Chris Larsene3845472015-11-18 12:27:15 -0800485}
486
Chris Larsen86829602015-11-18 12:27:52 -0800487static void GenNumberOfTrailingZeroes(LocationSummary* locations,
488 bool is64bit,
489 bool isR6,
490 bool isR2OrNewer,
491 MipsAssembler* assembler) {
492 Register out = locations->Out().AsRegister<Register>();
493 Register in_lo;
494 Register in;
495
496 if (is64bit) {
497 MipsLabel done;
498 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
499
500 in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
501
502 // If in_lo is zero then count the number of trailing zeroes in in_hi;
503 // otherwise count the number of trailing zeroes in in_lo.
504 // AT = in_lo ? in_lo : in_hi;
505 if (isR6) {
506 __ Seleqz(out, in_hi, in_lo);
507 __ Selnez(TMP, in_lo, in_lo);
508 __ Or(out, out, TMP);
509 } else {
510 __ Movz(out, in_hi, in_lo);
511 __ Movn(out, in_lo, in_lo);
512 }
513
514 in = out;
515 } else {
516 in = locations->InAt(0).AsRegister<Register>();
517 // Give in_lo a dummy value to keep the compiler from complaining.
518 // Since we only get here in the 32-bit case, this value will never
519 // be used.
520 in_lo = in;
521 }
522
523 // We don't have an instruction to count the number of trailing zeroes.
524 // Start by flipping the bits end-for-end so we can count the number of
525 // leading zeroes instead.
526 if (isR2OrNewer) {
527 __ Rotr(out, in, 16);
528 __ Wsbh(out, out);
529 } else {
530 // MIPS32r1
531 // __ Rotr(out, in, 16);
532 __ Sll(TMP, in, 16);
533 __ Srl(out, in, 16);
534 __ Or(out, out, TMP);
535 // __ Wsbh(out, out);
536 __ LoadConst32(AT, 0x00FF00FF);
537 __ And(TMP, out, AT);
538 __ Sll(TMP, TMP, 8);
539 __ Srl(out, out, 8);
540 __ And(out, out, AT);
541 __ Or(out, out, TMP);
542 }
543
544 if (isR6) {
545 __ Bitswap(out, out);
546 __ ClzR6(out, out);
547 } else {
548 __ LoadConst32(AT, 0x0F0F0F0F);
549 __ And(TMP, out, AT);
550 __ Sll(TMP, TMP, 4);
551 __ Srl(out, out, 4);
552 __ And(out, out, AT);
553 __ Or(out, TMP, out);
554 __ LoadConst32(AT, 0x33333333);
555 __ And(TMP, out, AT);
556 __ Sll(TMP, TMP, 2);
557 __ Srl(out, out, 2);
558 __ And(out, out, AT);
559 __ Or(out, TMP, out);
560 __ LoadConst32(AT, 0x55555555);
561 __ And(TMP, out, AT);
562 __ Sll(TMP, TMP, 1);
563 __ Srl(out, out, 1);
564 __ And(out, out, AT);
565 __ Or(out, TMP, out);
566 __ ClzR2(out, out);
567 }
568
569 if (is64bit) {
570 // If in_lo is zero, then we counted the number of trailing zeroes in in_hi so we must add the
571 // number of trailing zeroes in in_lo (32) to get the correct final count
572 __ LoadConst32(TMP, 32);
573 if (isR6) {
574 __ Seleqz(TMP, TMP, in_lo);
575 } else {
576 __ Movn(TMP, ZERO, in_lo);
577 }
578 __ Addu(out, out, TMP);
579 }
580}
581
582// int java.lang.Integer.numberOfTrailingZeros(int i)
583void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
584 CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
585}
586
587void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Chris Larsene16ce5a2015-11-18 12:30:20 -0800588 GenNumberOfTrailingZeroes(invoke->GetLocations(), false, IsR6(), IsR2OrNewer(), GetAssembler());
Chris Larsen86829602015-11-18 12:27:52 -0800589}
590
591// int java.lang.Long.numberOfTrailingZeros(long i)
592void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
593 CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
594}
595
596void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Chris Larsene16ce5a2015-11-18 12:30:20 -0800597 GenNumberOfTrailingZeroes(invoke->GetLocations(), true, IsR6(), IsR2OrNewer(), GetAssembler());
598}
599
// Direction of a rotation. GenRotate() canonicalizes a left rotation
// into a right rotation by the complemented amount.
enum RotationDirection {
  kRotateRight,
  kRotateLeft,
};
604
605static void GenRotate(HInvoke* invoke,
606 Primitive::Type type,
607 bool isR2OrNewer,
608 RotationDirection direction,
609 MipsAssembler* assembler) {
610 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
611
612 LocationSummary* locations = invoke->GetLocations();
613 if (invoke->InputAt(1)->IsIntConstant()) {
614 int32_t shift = static_cast<int32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue());
615 if (type == Primitive::kPrimInt) {
616 Register in = locations->InAt(0).AsRegister<Register>();
617 Register out = locations->Out().AsRegister<Register>();
618
619 shift &= 0x1f;
620 if (direction == kRotateLeft) {
621 shift = (32 - shift) & 0x1F;
622 }
623
624 if (isR2OrNewer) {
625 if ((shift != 0) || (out != in)) {
626 __ Rotr(out, in, shift);
627 }
628 } else {
629 if (shift == 0) {
630 if (out != in) {
631 __ Move(out, in);
632 }
633 } else {
634 __ Srl(AT, in, shift);
635 __ Sll(out, in, 32 - shift);
636 __ Or(out, out, AT);
637 }
638 }
639 } else { // Primitive::kPrimLong
640 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
641 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
642 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
643 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
644
645 shift &= 0x3f;
646 if (direction == kRotateLeft) {
647 shift = (64 - shift) & 0x3F;
648 }
649
650 if (shift == 0) {
651 __ Move(out_lo, in_lo);
652 __ Move(out_hi, in_hi);
653 } else if (shift == 32) {
654 __ Move(out_lo, in_hi);
655 __ Move(out_hi, in_lo);
656 } else if (shift < 32) {
657 __ Srl(AT, in_lo, shift);
658 __ Sll(out_lo, in_hi, 32 - shift);
659 __ Or(out_lo, out_lo, AT);
660 __ Srl(AT, in_hi, shift);
661 __ Sll(out_hi, in_lo, 32 - shift);
662 __ Or(out_hi, out_hi, AT);
663 } else {
664 __ Sll(AT, in_lo, 64 - shift);
665 __ Srl(out_lo, in_hi, shift - 32);
666 __ Or(out_lo, out_lo, AT);
667 __ Sll(AT, in_hi, 64 - shift);
668 __ Srl(out_hi, in_lo, shift - 32);
669 __ Or(out_hi, out_hi, AT);
670 }
671 }
672 } else { // !invoke->InputAt(1)->IsIntConstant()
673 Register shamt = locations->InAt(1).AsRegister<Register>();
674 if (type == Primitive::kPrimInt) {
675 Register in = locations->InAt(0).AsRegister<Register>();
676 Register out = locations->Out().AsRegister<Register>();
677
678 if (isR2OrNewer) {
679 if (direction == kRotateRight) {
680 __ Rotrv(out, in, shamt);
681 } else {
682 // negu tmp, shamt
683 __ Subu(TMP, ZERO, shamt);
684 __ Rotrv(out, in, TMP);
685 }
686 } else {
687 if (direction == kRotateRight) {
688 __ Srlv(AT, in, shamt);
689 __ Subu(TMP, ZERO, shamt);
690 __ Sllv(out, in, TMP);
691 __ Or(out, out, AT);
692 } else {
693 __ Sllv(AT, in, shamt);
694 __ Subu(TMP, ZERO, shamt);
695 __ Srlv(out, in, TMP);
696 __ Or(out, out, AT);
697 }
698 }
699 } else { // Primitive::kPrimLong
700 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
701 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
702 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
703 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
704
705 MipsLabel done;
706
707 if (direction == kRotateRight) {
708 __ Nor(TMP, ZERO, shamt);
709 __ Srlv(AT, in_lo, shamt);
710 __ Sll(out_lo, in_hi, 1);
711 __ Sllv(out_lo, out_lo, TMP);
712 __ Or(out_lo, out_lo, AT);
713 __ Srlv(AT, in_hi, shamt);
714 __ Sll(out_hi, in_lo, 1);
715 __ Sllv(out_hi, out_hi, TMP);
716 __ Or(out_hi, out_hi, AT);
717 } else {
718 __ Nor(TMP, ZERO, shamt);
719 __ Sllv(AT, in_lo, shamt);
720 __ Srl(out_lo, in_hi, 1);
721 __ Srlv(out_lo, out_lo, TMP);
722 __ Or(out_lo, out_lo, AT);
723 __ Sllv(AT, in_hi, shamt);
724 __ Srl(out_hi, in_lo, 1);
725 __ Srlv(out_hi, out_hi, TMP);
726 __ Or(out_hi, out_hi, AT);
727 }
728
729 __ Andi(TMP, shamt, 32);
730 __ Beqz(TMP, &done);
731 __ Move(TMP, out_hi);
732 __ Move(out_hi, out_lo);
733 __ Move(out_lo, TMP);
734
735 __ Bind(&done);
736 }
737 }
738}
739
740// int java.lang.Integer.rotateRight(int i, int distance)
741void IntrinsicLocationsBuilderMIPS::VisitIntegerRotateRight(HInvoke* invoke) {
742 LocationSummary* locations = new (arena_) LocationSummary(invoke,
743 LocationSummary::kNoCall,
744 kIntrinsified);
745 locations->SetInAt(0, Location::RequiresRegister());
746 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
747 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
748}
749
750void IntrinsicCodeGeneratorMIPS::VisitIntegerRotateRight(HInvoke* invoke) {
751 GenRotate(invoke, Primitive::kPrimInt, IsR2OrNewer(), kRotateRight, GetAssembler());
752}
753
754// long java.lang.Long.rotateRight(long i, int distance)
755void IntrinsicLocationsBuilderMIPS::VisitLongRotateRight(HInvoke* invoke) {
756 LocationSummary* locations = new (arena_) LocationSummary(invoke,
757 LocationSummary::kNoCall,
758 kIntrinsified);
759 locations->SetInAt(0, Location::RequiresRegister());
760 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
761 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
762}
763
764void IntrinsicCodeGeneratorMIPS::VisitLongRotateRight(HInvoke* invoke) {
765 GenRotate(invoke, Primitive::kPrimLong, IsR2OrNewer(), kRotateRight, GetAssembler());
766}
767
768// int java.lang.Integer.rotateLeft(int i, int distance)
769void IntrinsicLocationsBuilderMIPS::VisitIntegerRotateLeft(HInvoke* invoke) {
770 LocationSummary* locations = new (arena_) LocationSummary(invoke,
771 LocationSummary::kNoCall,
772 kIntrinsified);
773 locations->SetInAt(0, Location::RequiresRegister());
774 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
775 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
776}
777
778void IntrinsicCodeGeneratorMIPS::VisitIntegerRotateLeft(HInvoke* invoke) {
779 GenRotate(invoke, Primitive::kPrimInt, IsR2OrNewer(), kRotateLeft, GetAssembler());
780}
781
782// long java.lang.Long.rotateLeft(long i, int distance)
783void IntrinsicLocationsBuilderMIPS::VisitLongRotateLeft(HInvoke* invoke) {
784 LocationSummary* locations = new (arena_) LocationSummary(invoke,
785 LocationSummary::kNoCall,
786 kIntrinsified);
787 locations->SetInAt(0, Location::RequiresRegister());
788 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
789 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
790}
791
792void IntrinsicCodeGeneratorMIPS::VisitLongRotateLeft(HInvoke* invoke) {
793 GenRotate(invoke, Primitive::kPrimLong, IsR2OrNewer(), kRotateLeft, GetAssembler());
Chris Larsen86829602015-11-18 12:27:52 -0800794}
795
Chris Larsen70014c82015-11-18 12:26:08 -0800796// int java.lang.Integer.reverse(int)
797void IntrinsicLocationsBuilderMIPS::VisitIntegerReverse(HInvoke* invoke) {
798 CreateIntToIntLocations(arena_, invoke);
799}
800
801void IntrinsicCodeGeneratorMIPS::VisitIntegerReverse(HInvoke* invoke) {
802 GenReverse(invoke->GetLocations(),
803 Primitive::kPrimInt,
Chris Larsene16ce5a2015-11-18 12:30:20 -0800804 IsR2OrNewer(),
805 IsR6(),
Chris Larsen70014c82015-11-18 12:26:08 -0800806 true,
807 GetAssembler());
808}
809
810// long java.lang.Long.reverse(long)
811void IntrinsicLocationsBuilderMIPS::VisitLongReverse(HInvoke* invoke) {
812 CreateIntToIntLocations(arena_, invoke);
813}
814
815void IntrinsicCodeGeneratorMIPS::VisitLongReverse(HInvoke* invoke) {
816 GenReverse(invoke->GetLocations(),
817 Primitive::kPrimLong,
Chris Larsene16ce5a2015-11-18 12:30:20 -0800818 IsR2OrNewer(),
819 IsR6(),
Chris Larsen70014c82015-11-18 12:26:08 -0800820 true,
821 GetAssembler());
Chris Larsen3f8bf652015-10-28 10:08:56 -0700822}
823
Chris Larsen16ba2b42015-11-02 10:58:31 -0800824// boolean java.lang.String.equals(Object anObject)
825void IntrinsicLocationsBuilderMIPS::VisitStringEquals(HInvoke* invoke) {
826 LocationSummary* locations = new (arena_) LocationSummary(invoke,
827 LocationSummary::kNoCall,
828 kIntrinsified);
829 locations->SetInAt(0, Location::RequiresRegister());
830 locations->SetInAt(1, Location::RequiresRegister());
831 locations->SetOut(Location::RequiresRegister());
832
833 // Temporary registers to store lengths of strings and for calculations.
834 locations->AddTemp(Location::RequiresRegister());
835 locations->AddTemp(Location::RequiresRegister());
836 locations->AddTemp(Location::RequiresRegister());
837}
838
839void IntrinsicCodeGeneratorMIPS::VisitStringEquals(HInvoke* invoke) {
840 MipsAssembler* assembler = GetAssembler();
841 LocationSummary* locations = invoke->GetLocations();
842
843 Register str = locations->InAt(0).AsRegister<Register>();
844 Register arg = locations->InAt(1).AsRegister<Register>();
845 Register out = locations->Out().AsRegister<Register>();
846
847 Register temp1 = locations->GetTemp(0).AsRegister<Register>();
848 Register temp2 = locations->GetTemp(1).AsRegister<Register>();
849 Register temp3 = locations->GetTemp(2).AsRegister<Register>();
850
851 MipsLabel loop;
852 MipsLabel end;
853 MipsLabel return_true;
854 MipsLabel return_false;
855
856 // Get offsets of count, value, and class fields within a string object.
857 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
858 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
859 const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
860
861 // Note that the null check must have been done earlier.
862 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
863
864 // If the register containing the pointer to "this", and the register
865 // containing the pointer to "anObject" are the same register then
866 // "this", and "anObject" are the same object and we can
867 // short-circuit the logic to a true result.
868 if (str == arg) {
869 __ LoadConst32(out, 1);
870 return;
871 }
872
873 // Check if input is null, return false if it is.
874 __ Beqz(arg, &return_false);
875
876 // Reference equality check, return true if same reference.
877 __ Beq(str, arg, &return_true);
878
879 // Instanceof check for the argument by comparing class fields.
880 // All string objects must have the same type since String cannot be subclassed.
881 // Receiver must be a string object, so its class field is equal to all strings' class fields.
882 // If the argument is a string object, its class field must be equal to receiver's class field.
883 __ Lw(temp1, str, class_offset);
884 __ Lw(temp2, arg, class_offset);
885 __ Bne(temp1, temp2, &return_false);
886
887 // Load lengths of this and argument strings.
888 __ Lw(temp1, str, count_offset);
889 __ Lw(temp2, arg, count_offset);
890 // Check if lengths are equal, return false if they're not.
891 __ Bne(temp1, temp2, &return_false);
892 // Return true if both strings are empty.
893 __ Beqz(temp1, &return_true);
894
895 // Don't overwrite input registers
896 __ Move(TMP, str);
897 __ Move(temp3, arg);
898
899 // Assertions that must hold in order to compare strings 2 characters at a time.
900 DCHECK_ALIGNED(value_offset, 4);
901 static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");
902
903 // Loop to compare strings 2 characters at a time starting at the beginning of the string.
904 // Ok to do this because strings are zero-padded.
905 __ Bind(&loop);
906 __ Lw(out, TMP, value_offset);
907 __ Lw(temp2, temp3, value_offset);
908 __ Bne(out, temp2, &return_false);
909 __ Addiu(TMP, TMP, 4);
910 __ Addiu(temp3, temp3, 4);
911 __ Addiu(temp1, temp1, -2);
912 __ Bgtz(temp1, &loop);
913
914 // Return true and exit the function.
915 // If loop does not result in returning false, we return true.
916 __ Bind(&return_true);
917 __ LoadConst32(out, 1);
918 __ B(&end);
919
920 // Return false and exit the function.
921 __ Bind(&return_false);
922 __ LoadConst32(out, 0);
923 __ Bind(&end);
924}
925
// Unimplemented intrinsics.

// Expands to a pair of empty visitor stubs for an intrinsic that has no
// MIPS32 implementation yet: one in IntrinsicLocationsBuilderMIPS and one in
// IntrinsicCodeGeneratorMIPS.  Leaving the locations builder empty means no
// LocationSummary is created for the invoke; presumably the intrinsic
// recognizer then falls back to the regular (non-intrinsified) call path —
// confirm against the shared intrinsics dispatch code.
// NOTE: the backslash line continuations are load-bearing; do not put
// comments inside the macro body.
#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}
933
// java.lang.Math absolute value, min/max, sqrt, and rounding operations.
UNIMPLEMENTED_INTRINSIC(MathAbsDouble)
UNIMPLEMENTED_INTRINSIC(MathAbsFloat)
UNIMPLEMENTED_INTRINSIC(MathAbsInt)
UNIMPLEMENTED_INTRINSIC(MathAbsLong)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinIntInt)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxIntInt)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathSqrt)
UNIMPLEMENTED_INTRINSIC(MathCeil)
UNIMPLEMENTED_INTRINSIC(MathFloor)
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)
// libcore.io.Memory raw-address peek/poke accessors.
UNIMPLEMENTED_INTRINSIC(MemoryPeekByte)
UNIMPLEMENTED_INTRINSIC(MemoryPeekIntNative)
UNIMPLEMENTED_INTRINSIC(MemoryPeekLongNative)
UNIMPLEMENTED_INTRINSIC(MemoryPeekShortNative)
UNIMPLEMENTED_INTRINSIC(MemoryPokeByte)
UNIMPLEMENTED_INTRINSIC(MemoryPokeIntNative)
UNIMPLEMENTED_INTRINSIC(MemoryPokeLongNative)
UNIMPLEMENTED_INTRINSIC(MemoryPokeShortNative)
// java.lang.Thread.currentThread().
UNIMPLEMENTED_INTRINSIC(ThreadCurrentThread)
// sun.misc.Unsafe field/array accessors and compare-and-swap operations.
UNIMPLEMENTED_INTRINSIC(UnsafeGet)
UNIMPLEMENTED_INTRINSIC(UnsafeGetVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeGetLong)
UNIMPLEMENTED_INTRINSIC(UnsafeGetLongVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeGetObject)
UNIMPLEMENTED_INTRINSIC(UnsafeGetObjectVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafePut)
UNIMPLEMENTED_INTRINSIC(UnsafePutOrdered)
UNIMPLEMENTED_INTRINSIC(UnsafePutVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafePutObject)
UNIMPLEMENTED_INTRINSIC(UnsafePutObjectOrdered)
UNIMPLEMENTED_INTRINSIC(UnsafePutObjectVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafePutLong)
UNIMPLEMENTED_INTRINSIC(UnsafePutLongOrdered)
UNIMPLEMENTED_INTRINSIC(UnsafePutLongVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeCASInt)
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)
UNIMPLEMENTED_INTRINSIC(UnsafeCASObject)
// java.lang.String operations (StringEquals is implemented above and is
// deliberately absent from this list).
UNIMPLEMENTED_INTRINSIC(StringCharAt)
UNIMPLEMENTED_INTRINSIC(StringCompareTo)
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(StringNewStringFromBytes)
UNIMPLEMENTED_INTRINSIC(StringNewStringFromChars)
UNIMPLEMENTED_INTRINSIC(StringNewStringFromString)

// Reference/array-copy helpers.
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)

// java.lang.Math transcendental functions.
UNIMPLEMENTED_INTRINSIC(MathCos)
UNIMPLEMENTED_INTRINSIC(MathSin)
UNIMPLEMENTED_INTRINSIC(MathAcos)
UNIMPLEMENTED_INTRINSIC(MathAsin)
UNIMPLEMENTED_INTRINSIC(MathAtan)
UNIMPLEMENTED_INTRINSIC(MathAtan2)
UNIMPLEMENTED_INTRINSIC(MathCbrt)
UNIMPLEMENTED_INTRINSIC(MathCosh)
UNIMPLEMENTED_INTRINSIC(MathExp)
UNIMPLEMENTED_INTRINSIC(MathExpm1)
UNIMPLEMENTED_INTRINSIC(MathHypot)
UNIMPLEMENTED_INTRINSIC(MathLog)
UNIMPLEMENTED_INTRINSIC(MathLog10)
UNIMPLEMENTED_INTRINSIC(MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)
#undef UNIMPLEMENTED_INTRINSIC
1010
1011#undef __
1012
1013} // namespace mips
1014} // namespace art