/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "intrinsics_mips.h"
18
19#include "arch/mips/instruction_set_features_mips.h"
20#include "art_method.h"
21#include "code_generator_mips.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "intrinsics.h"
24#include "mirror/array-inl.h"
25#include "mirror/string.h"
26#include "thread.h"
27#include "utils/mips/assembler_mips.h"
28#include "utils/mips/constants_mips.h"
29
30namespace art {
31
32namespace mips {
33
// Caches the graph's arena allocator; the Visit* locations builders below
// allocate their LocationSummary objects from it.
IntrinsicLocationsBuilderMIPS::IntrinsicLocationsBuilderMIPS(CodeGeneratorMIPS* codegen)
    : arena_(codegen->GetGraph()->GetArena()) {
}
37
// Returns the code generator's assembler, cast to the concrete MIPS type.
MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() {
  return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler());
}
41
// Returns the arena used for allocations during this compilation.
ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}
45
// True when targeting MIPS32r2 or newer; the code below relies on this for
// instructions such as WSBH, SEH and ROTR.
inline bool IntrinsicCodeGeneratorMIPS::IsR2OrNewer() const {
  return codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
}
49
// True when targeting MIPS32r6; the code below relies on this for
// instructions such as BITSWAP, SELEQZ and SELNEZ.
inline bool IntrinsicCodeGeneratorMIPS::IsR6() const {
  return codegen_->GetInstructionSetFeatures().IsR6();
}
53
// True when the target FPU has 32-bit (as opposed to 64-bit) registers.
inline bool IntrinsicCodeGeneratorMIPS::Is32BitFPU() const {
  return codegen_->GetInstructionSetFeatures().Is32BitFloatingPoint();
}
57
Chris Larsen701566a2015-10-27 15:29:13 -070058#define __ codegen->GetAssembler()->
59
60static void MoveFromReturnRegister(Location trg,
61 Primitive::Type type,
62 CodeGeneratorMIPS* codegen) {
63 if (!trg.IsValid()) {
64 DCHECK_EQ(type, Primitive::kPrimVoid);
65 return;
66 }
67
68 DCHECK_NE(type, Primitive::kPrimVoid);
69
70 if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
71 Register trg_reg = trg.AsRegister<Register>();
72 if (trg_reg != V0) {
73 __ Move(V0, trg_reg);
74 }
75 } else {
76 FRegister trg_reg = trg.AsFpuRegister<FRegister>();
77 if (trg_reg != F0) {
78 if (type == Primitive::kPrimFloat) {
79 __ MovS(F0, trg_reg);
80 } else {
81 __ MovD(F0, trg_reg);
82 }
83 }
84 }
85}
86
87static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
88 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
89 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
90}
91
92// Slow-path for fallback (calling the managed code to handle the
93// intrinsic) in an intrinsified call. This will copy the arguments
94// into the positions for a regular call.
95//
96// Note: The actual parameters are required to be in the locations
97// given by the invoke's location summary. If an intrinsic
98// modifies those locations before a slowpath call, they must be
99// restored!
class IntrinsicSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit IntrinsicSlowPathMIPS(HInvoke* invoke) : invoke_(invoke) { }

  // Emits the out-of-line call to the managed implementation of the
  // intrinsic, preserving and restoring all live registers around it.
  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS* codegen = down_cast<CodeGeneratorMIPS*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Save everything live before the call clobbers it.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Put the arguments into calling-convention positions.
    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
      codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
    } else {
      // Only static/direct intrinsic calls have slow-path support so far.
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS);
};
142
143#undef __
144
145bool IntrinsicLocationsBuilderMIPS::TryDispatch(HInvoke* invoke) {
146 Dispatch(invoke);
147 LocationSummary* res = invoke->GetLocations();
148 return res != nullptr && res->Intrinsified();
149}
150
151#define __ assembler->
152
Chris Larsen3f8bf652015-10-28 10:08:56 -0700153static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
154 LocationSummary* locations = new (arena) LocationSummary(invoke,
155 LocationSummary::kNoCall,
156 kIntrinsified);
157 locations->SetInAt(0, Location::RequiresFpuRegister());
158 locations->SetOut(Location::RequiresRegister());
159}
160
161static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
162 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
163
164 if (is64bit) {
165 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
166 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
167
168 __ Mfc1(out_lo, in);
Alexey Frunzebb9863a2016-01-11 15:51:16 -0800169 __ MoveFromFpuHigh(out_hi, in);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700170 } else {
171 Register out = locations->Out().AsRegister<Register>();
172
173 __ Mfc1(out, in);
174 }
175}
176
// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  // Raw bit move (MFC1/MFHC1); no numeric conversion is emitted.
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
185
// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  // Raw bit move (MFC1); no numeric conversion is emitted.
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
194
195static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
196 LocationSummary* locations = new (arena) LocationSummary(invoke,
197 LocationSummary::kNoCall,
198 kIntrinsified);
199 locations->SetInAt(0, Location::RequiresRegister());
200 locations->SetOut(Location::RequiresFpuRegister());
201}
202
203static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
204 FRegister out = locations->Out().AsFpuRegister<FRegister>();
205
206 if (is64bit) {
207 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
208 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
209
210 __ Mtc1(in_lo, out);
Alexey Frunzebb9863a2016-01-11 15:51:16 -0800211 __ MoveToFpuHigh(in_hi, out);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700212 } else {
213 Register in = locations->InAt(0).AsRegister<Register>();
214
215 __ Mtc1(in, out);
216 }
217}
218
// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  // Raw bit move (MTC1/MTHC1); no numeric conversion is emitted.
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
227
// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  // Raw bit move (MTC1); no numeric conversion is emitted.
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
236
Chris Larsen86829602015-11-18 12:27:52 -0800237static void CreateIntToIntLocations(ArenaAllocator* arena,
238 HInvoke* invoke,
239 Location::OutputOverlap overlaps = Location::kNoOutputOverlap) {
Chris Larsen3f8bf652015-10-28 10:08:56 -0700240 LocationSummary* locations = new (arena) LocationSummary(invoke,
241 LocationSummary::kNoCall,
242 kIntrinsified);
243 locations->SetInAt(0, Location::RequiresRegister());
Chris Larsen86829602015-11-18 12:27:52 -0800244 locations->SetOut(Location::RequiresRegister(), overlaps);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700245}
246
// Emits code reversing the bytes (reverseBits == false) or all the bits
// (reverseBits == true) of a 16-, 32- or 64-bit value.  Bit reversal is
// not supported for shorts (no Java intrinsic needs it).
static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       bool isR2OrNewer,
                       bool isR6,
                       bool reverseBits,
                       MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimShort ||
         type == Primitive::kPrimInt ||
         type == Primitive::kPrimLong);
  DCHECK(type != Primitive::kPrimShort || !reverseBits);

  if (type == Primitive::kPrimShort) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      __ Wsbh(out, in);
      __ Seh(out, out);
    } else {
      // MIPS32r1: swap the two bytes by hand and sign-extend the result.
      // TMP = low byte sign-extended into bits 15..8; out = high byte
      // shifted down into bits 7..0.
      __ Sll(TMP, in, 24);
      __ Sra(TMP, TMP, 16);
      __ Sll(out, in, 16);
      __ Srl(out, out, 24);
      __ Or(out, out, TMP);
    }
  } else if (type == Primitive::kPrimInt) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
    } else {
      // MIPS32r1: emulate ROTR and WSBH with shifts and masks.
      // __ Rotr(out, in, 16);
      __ Sll(TMP, in, 16);
      __ Srl(out, in, 16);
      __ Or(out, out, TMP);
      // __ Wsbh(out, out);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(TMP, out, AT);
      __ Sll(TMP, TMP, 8);
      __ Srl(out, out, 8);
      __ And(out, out, AT);
      __ Or(out, out, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        // Bytes are already reversed; reversing the bits inside each byte
        // completes the full 32-bit bit reversal.
        __ Bitswap(out, out);
      } else {
        // MIPS32r1/r2: classic divide-and-conquer bit reversal within each
        // byte — swap nibbles, then bit pairs, then adjacent bits.
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out, out, 4);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out, out, 2);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out, out, 1);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
      }
    }
  } else if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    if (isR2OrNewer) {
      __ Rotr(AT, in_hi, 16);
      __ Rotr(TMP, in_lo, 16);
      __ Wsbh(out_lo, AT);
      __ Wsbh(out_hi, TMP);
    } else {
      // When calling CreateIntToIntLocations() we promised that the
      // use of the out_lo/out_hi wouldn't overlap with the use of
      // in_lo/in_hi. Be very careful not to write to out_lo/out_hi
      // until we're completely done reading from in_lo/in_hi.
      // __ Rotr(TMP, in_lo, 16);
      __ Sll(TMP, in_lo, 16);
      __ Srl(AT, in_lo, 16);
      __ Or(TMP, TMP, AT);             // Hold in TMP until it's safe
                                       // to write to out_hi.
      // __ Rotr(out_lo, in_hi, 16);
      __ Sll(AT, in_hi, 16);
      __ Srl(out_lo, in_hi, 16);       // Here we are finally done reading
                                       // from in_lo/in_hi so it's okay to
                                       // write to out_lo/out_hi.
      __ Or(out_lo, out_lo, AT);
      // __ Wsbh(out_hi, out_hi);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(out_hi, TMP, AT);
      __ Sll(out_hi, out_hi, 8);
      __ Srl(TMP, TMP, 8);
      __ And(TMP, TMP, AT);
      __ Or(out_hi, out_hi, TMP);
      // __ Wsbh(out_lo, out_lo);
      __ And(TMP, out_lo, AT);  // AT already holds the correct mask value
      __ Sll(TMP, TMP, 8);
      __ Srl(out_lo, out_lo, 8);
      __ And(out_lo, out_lo, AT);
      __ Or(out_lo, out_lo, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        // Bytes (and words) are already reversed; reversing the bits
        // inside each byte completes the 64-bit bit reversal.
        __ Bitswap(out_hi, out_hi);
        __ Bitswap(out_lo, out_lo);
      } else {
        // Same nibble/pair/bit swap sequence as the 32-bit case, applied
        // to both halves of the register pair.
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_hi, out_hi, 4);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_lo, out_lo, 4);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_hi, out_hi, 2);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_lo, out_lo, 2);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_hi, out_hi, 1);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_lo, out_lo, 1);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
      }
    }
  }
}
400
// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimInt,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}
414
// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimLong,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}
428
// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimShort,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}
442
// Emits code counting the leading zero bits of a 32- or 64-bit value.
static void GenNumberOfLeadingZeroes(LocationSummary* locations,
                                     bool is64bit,
                                     bool isR6,
                                     MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();
  if (is64bit) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    // clz64(x) = clz(hi) + (hi == 0 ? clz(lo) : 0); AT holds clz(hi)
    // (which is 32 when hi is zero), TMP holds the conditional low count.
    if (isR6) {
      __ ClzR6(AT, in_hi);
      __ ClzR6(TMP, in_lo);
      __ Seleqz(TMP, TMP, in_hi);  // TMP = (in_hi == 0) ? clz(lo) : 0.
    } else {
      __ ClzR2(AT, in_hi);
      __ ClzR2(TMP, in_lo);
      __ Movn(TMP, ZERO, in_hi);   // Zero TMP when the high word is non-zero.
    }
    __ Addu(out, AT, TMP);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();

    if (isR6) {
      __ ClzR6(out, in);
    } else {
      __ ClzR2(out, in);
    }
  }
}
472
// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, IsR6(), GetAssembler());
}
481
// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, IsR6(), GetAssembler());
}
490
Chris Larsen86829602015-11-18 12:27:52 -0800491static void GenNumberOfTrailingZeroes(LocationSummary* locations,
492 bool is64bit,
493 bool isR6,
494 bool isR2OrNewer,
495 MipsAssembler* assembler) {
496 Register out = locations->Out().AsRegister<Register>();
497 Register in_lo;
498 Register in;
499
500 if (is64bit) {
501 MipsLabel done;
502 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
503
504 in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
505
506 // If in_lo is zero then count the number of trailing zeroes in in_hi;
507 // otherwise count the number of trailing zeroes in in_lo.
508 // AT = in_lo ? in_lo : in_hi;
509 if (isR6) {
510 __ Seleqz(out, in_hi, in_lo);
511 __ Selnez(TMP, in_lo, in_lo);
512 __ Or(out, out, TMP);
513 } else {
514 __ Movz(out, in_hi, in_lo);
515 __ Movn(out, in_lo, in_lo);
516 }
517
518 in = out;
519 } else {
520 in = locations->InAt(0).AsRegister<Register>();
521 // Give in_lo a dummy value to keep the compiler from complaining.
522 // Since we only get here in the 32-bit case, this value will never
523 // be used.
524 in_lo = in;
525 }
526
527 // We don't have an instruction to count the number of trailing zeroes.
528 // Start by flipping the bits end-for-end so we can count the number of
529 // leading zeroes instead.
530 if (isR2OrNewer) {
531 __ Rotr(out, in, 16);
532 __ Wsbh(out, out);
533 } else {
534 // MIPS32r1
535 // __ Rotr(out, in, 16);
536 __ Sll(TMP, in, 16);
537 __ Srl(out, in, 16);
538 __ Or(out, out, TMP);
539 // __ Wsbh(out, out);
540 __ LoadConst32(AT, 0x00FF00FF);
541 __ And(TMP, out, AT);
542 __ Sll(TMP, TMP, 8);
543 __ Srl(out, out, 8);
544 __ And(out, out, AT);
545 __ Or(out, out, TMP);
546 }
547
548 if (isR6) {
549 __ Bitswap(out, out);
550 __ ClzR6(out, out);
551 } else {
552 __ LoadConst32(AT, 0x0F0F0F0F);
553 __ And(TMP, out, AT);
554 __ Sll(TMP, TMP, 4);
555 __ Srl(out, out, 4);
556 __ And(out, out, AT);
557 __ Or(out, TMP, out);
558 __ LoadConst32(AT, 0x33333333);
559 __ And(TMP, out, AT);
560 __ Sll(TMP, TMP, 2);
561 __ Srl(out, out, 2);
562 __ And(out, out, AT);
563 __ Or(out, TMP, out);
564 __ LoadConst32(AT, 0x55555555);
565 __ And(TMP, out, AT);
566 __ Sll(TMP, TMP, 1);
567 __ Srl(out, out, 1);
568 __ And(out, out, AT);
569 __ Or(out, TMP, out);
570 __ ClzR2(out, out);
571 }
572
573 if (is64bit) {
574 // If in_lo is zero, then we counted the number of trailing zeroes in in_hi so we must add the
575 // number of trailing zeroes in in_lo (32) to get the correct final count
576 __ LoadConst32(TMP, 32);
577 if (isR6) {
578 __ Seleqz(TMP, TMP, in_lo);
579 } else {
580 __ Movn(TMP, ZERO, in_lo);
581 }
582 __ Addu(out, out, TMP);
583 }
584}
585
// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  // kOutputOverlap: the generator uses the output as scratch while the
  // input is still live, so they must not share a register.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(
      invoke->GetLocations(), /* is64bit */ false, IsR6(), IsR2OrNewer(), GetAssembler());
}
594
// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  // kOutputOverlap: the generator uses the output as scratch while the
  // input pair is still live, so they must not share registers.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(
      invoke->GetLocations(), /* is64bit */ true, IsR6(), IsR2OrNewer(), GetAssembler());
}
603
// Direction of a bitwise rotation; lets GenRotate() serve both the
// rotateLeft and rotateRight intrinsics.
enum RotationDirection {
  kRotateRight,
  kRotateLeft,
};
608
// Emits code rotating a 32- or 64-bit value left or right.  The rotation
// amount may be an HIR constant (folded at compile time) or a register.
// Left rotations are implemented as right rotations by the complemented
// amount.
static void GenRotate(HInvoke* invoke,
                      Primitive::Type type,
                      bool isR2OrNewer,
                      RotationDirection direction,
                      MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = invoke->GetLocations();
  if (invoke->InputAt(1)->IsIntConstant()) {
    int32_t shift = static_cast<int32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue());
    if (type == Primitive::kPrimInt) {
      Register in = locations->InAt(0).AsRegister<Register>();
      Register out = locations->Out().AsRegister<Register>();

      shift &= 0x1f;
      if (direction == kRotateLeft) {
        // rotl(x, s) == rotr(x, 32 - s).
        shift = (32 - shift) & 0x1F;
      }

      if (isR2OrNewer) {
        if ((shift != 0) || (out != in)) {
          __ Rotr(out, in, shift);
        }
      } else {
        // MIPS32r1: emulate ROTR with a shift pair, or a plain move for a
        // zero rotation.
        if (shift == 0) {
          if (out != in) {
            __ Move(out, in);
          }
        } else {
          __ Srl(AT, in, shift);
          __ Sll(out, in, 32 - shift);
          __ Or(out, out, AT);
        }
      }
    } else {  // Primitive::kPrimLong
      Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register out_lo = locations->Out().AsRegisterPairLow<Register>();
      Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

      shift &= 0x3f;
      if (direction == kRotateLeft) {
        // rotl(x, s) == rotr(x, 64 - s).
        shift = (64 - shift) & 0x3F;
      }

      // Four cases: no-op, exact half swap, and the two cross-half
      // shift-and-combine forms for shifts below/above 32.
      if (shift == 0) {
        __ Move(out_lo, in_lo);
        __ Move(out_hi, in_hi);
      } else if (shift == 32) {
        __ Move(out_lo, in_hi);
        __ Move(out_hi, in_lo);
      } else if (shift < 32) {
        __ Srl(AT, in_lo, shift);
        __ Sll(out_lo, in_hi, 32 - shift);
        __ Or(out_lo, out_lo, AT);
        __ Srl(AT, in_hi, shift);
        __ Sll(out_hi, in_lo, 32 - shift);
        __ Or(out_hi, out_hi, AT);
      } else {
        __ Sll(AT, in_lo, 64 - shift);
        __ Srl(out_lo, in_hi, shift - 32);
        __ Or(out_lo, out_lo, AT);
        __ Sll(AT, in_hi, 64 - shift);
        __ Srl(out_hi, in_lo, shift - 32);
        __ Or(out_hi, out_hi, AT);
      }
    }
  } else {  // !invoke->InputAt(1)->IsIntConstant()
    Register shamt = locations->InAt(1).AsRegister<Register>();
    if (type == Primitive::kPrimInt) {
      Register in = locations->InAt(0).AsRegister<Register>();
      Register out = locations->Out().AsRegister<Register>();

      if (isR2OrNewer) {
        if (direction == kRotateRight) {
          __ Rotrv(out, in, shamt);
        } else {
          // negu tmp, shamt
          __ Subu(TMP, ZERO, shamt);
          __ Rotrv(out, in, TMP);
        }
      } else {
        // MIPS32r1: combine a right shift by shamt with a left shift by
        // -shamt (variable shifts use only the low 5 bits), or vice versa.
        if (direction == kRotateRight) {
          __ Srlv(AT, in, shamt);
          __ Subu(TMP, ZERO, shamt);
          __ Sllv(out, in, TMP);
          __ Or(out, out, AT);
        } else {
          __ Sllv(AT, in, shamt);
          __ Subu(TMP, ZERO, shamt);
          __ Srlv(out, in, TMP);
          __ Or(out, out, AT);
        }
      }
    } else {  // Primitive::kPrimLong
      Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register out_lo = locations->Out().AsRegisterPairLow<Register>();
      Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

      MipsLabel done;

      // TMP = ~shamt, i.e. 31 - (shamt mod 32); shifting the other half by
      // one and then by TMP yields the (32 - shamt)-bit complement shift.
      if (direction == kRotateRight) {
        __ Nor(TMP, ZERO, shamt);
        __ Srlv(AT, in_lo, shamt);
        __ Sll(out_lo, in_hi, 1);
        __ Sllv(out_lo, out_lo, TMP);
        __ Or(out_lo, out_lo, AT);
        __ Srlv(AT, in_hi, shamt);
        __ Sll(out_hi, in_lo, 1);
        __ Sllv(out_hi, out_hi, TMP);
        __ Or(out_hi, out_hi, AT);
      } else {
        __ Nor(TMP, ZERO, shamt);
        __ Sllv(AT, in_lo, shamt);
        __ Srl(out_lo, in_hi, 1);
        __ Srlv(out_lo, out_lo, TMP);
        __ Or(out_lo, out_lo, AT);
        __ Sllv(AT, in_hi, shamt);
        __ Srl(out_hi, in_lo, 1);
        __ Srlv(out_hi, out_hi, TMP);
        __ Or(out_hi, out_hi, AT);
      }

      // For amounts 32..63 (bit 5 of shamt set) the two halves must
      // additionally be exchanged.
      __ Andi(TMP, shamt, 32);
      __ Beqz(TMP, &done);
      __ Move(TMP, out_hi);
      __ Move(out_hi, out_lo);
      __ Move(out_lo, TMP);

      __ Bind(&done);
    }
  }
}
743
// int java.lang.Integer.rotateRight(int i, int distance)
void IntrinsicLocationsBuilderMIPS::VisitIntegerRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The distance may be a constant; GenRotate() folds it at compile time.
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerRotateRight(HInvoke* invoke) {
  GenRotate(invoke, Primitive::kPrimInt, IsR2OrNewer(), kRotateRight, GetAssembler());
}
757
// long java.lang.Long.rotateRight(long i, int distance)
void IntrinsicLocationsBuilderMIPS::VisitLongRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The distance may be a constant; GenRotate() folds it at compile time.
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  // kOutputOverlap: the 64-bit sequence writes half of the output while it
  // is still reading the input pair, so they must not share registers.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongRotateRight(HInvoke* invoke) {
  GenRotate(invoke, Primitive::kPrimLong, IsR2OrNewer(), kRotateRight, GetAssembler());
}
771
// int java.lang.Integer.rotateLeft(int i, int distance)
void IntrinsicLocationsBuilderMIPS::VisitIntegerRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The distance may be a constant; GenRotate() folds it at compile time.
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerRotateLeft(HInvoke* invoke) {
  GenRotate(invoke, Primitive::kPrimInt, IsR2OrNewer(), kRotateLeft, GetAssembler());
}
785
// long java.lang.Long.rotateLeft(long i, int distance)
void IntrinsicLocationsBuilderMIPS::VisitLongRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The distance may be a constant; GenRotate() folds it at compile time.
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  // kOutputOverlap: the 64-bit sequence writes half of the output while it
  // is still reading the input pair, so they must not share registers.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongRotateLeft(HInvoke* invoke) {
  GenRotate(invoke, Primitive::kPrimLong, IsR2OrNewer(), kRotateLeft, GetAssembler());
}
799
// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimInt,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ true,
             GetAssembler());
}
813
// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimLong,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ true,
             GetAssembler());
}
827
Chris Larsen16ba2b42015-11-02 10:58:31 -0800828// boolean java.lang.String.equals(Object anObject)
829void IntrinsicLocationsBuilderMIPS::VisitStringEquals(HInvoke* invoke) {
830 LocationSummary* locations = new (arena_) LocationSummary(invoke,
831 LocationSummary::kNoCall,
832 kIntrinsified);
833 locations->SetInAt(0, Location::RequiresRegister());
834 locations->SetInAt(1, Location::RequiresRegister());
835 locations->SetOut(Location::RequiresRegister());
836
837 // Temporary registers to store lengths of strings and for calculations.
838 locations->AddTemp(Location::RequiresRegister());
839 locations->AddTemp(Location::RequiresRegister());
840 locations->AddTemp(Location::RequiresRegister());
841}
842
// Emits the body of String.equals(): null check, class check, length
// check, then a word-at-a-time content comparison.
void IntrinsicCodeGeneratorMIPS::VisitStringEquals(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  MipsLabel loop;
  MipsLabel end;
  MipsLabel return_true;
  MipsLabel return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst32(out, 1);
    return;
  }

  // Check if input is null, return false if it is.
  __ Beqz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Beq(str, arg, &return_true);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Lw(temp1, str, class_offset);
  __ Lw(temp2, arg, class_offset);
  __ Bne(temp1, temp2, &return_false);

  // Load lengths of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if lengths are equal, return false if they're not.
  __ Bne(temp1, temp2, &return_false);
  // Return true if both strings are empty.
  __ Beqz(temp1, &return_true);

  // Don't overwrite input registers
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings 2 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded.
  __ Bind(&loop);
  __ Lw(out, TMP, value_offset);
  __ Lw(temp2, temp3, value_offset);
  __ Bne(out, temp2, &return_false);
  __ Addiu(TMP, TMP, 4);
  __ Addiu(temp3, temp3, 4);
  // temp1 counts remaining characters; two are consumed per iteration.
  __ Addiu(temp1, temp1, -2);
  __ Bgtz(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst32(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst32(out, 0);
  __ Bind(&end);
}
929
// Unimplemented intrinsics.

// Generates empty Visit methods, in both the locations builder and the code
// generator, for every intrinsic not yet implemented on MIPS32. Leaving the
// locations builder empty presumably makes the compiler fall back to the
// ordinary (non-intrinsified) call for these methods — verify against the
// intrinsics dispatch in the shared intrinsics code.
#define UNIMPLEMENTED_INTRINSIC(Name) \
void IntrinsicLocationsBuilderMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGeneratorMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}

UNIMPLEMENTED_INTRINSIC(MathAbsDouble)
UNIMPLEMENTED_INTRINSIC(MathAbsFloat)
UNIMPLEMENTED_INTRINSIC(MathAbsInt)
UNIMPLEMENTED_INTRINSIC(MathAbsLong)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinIntInt)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxIntInt)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathSqrt)
UNIMPLEMENTED_INTRINSIC(MathCeil)
UNIMPLEMENTED_INTRINSIC(MathFloor)
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)
UNIMPLEMENTED_INTRINSIC(MemoryPeekByte)
UNIMPLEMENTED_INTRINSIC(MemoryPeekIntNative)
UNIMPLEMENTED_INTRINSIC(MemoryPeekLongNative)
UNIMPLEMENTED_INTRINSIC(MemoryPeekShortNative)
UNIMPLEMENTED_INTRINSIC(MemoryPokeByte)
UNIMPLEMENTED_INTRINSIC(MemoryPokeIntNative)
UNIMPLEMENTED_INTRINSIC(MemoryPokeLongNative)
UNIMPLEMENTED_INTRINSIC(MemoryPokeShortNative)
UNIMPLEMENTED_INTRINSIC(ThreadCurrentThread)
UNIMPLEMENTED_INTRINSIC(UnsafeGet)
UNIMPLEMENTED_INTRINSIC(UnsafeGetVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeGetLong)
UNIMPLEMENTED_INTRINSIC(UnsafeGetLongVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeGetObject)
UNIMPLEMENTED_INTRINSIC(UnsafeGetObjectVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafePut)
UNIMPLEMENTED_INTRINSIC(UnsafePutOrdered)
UNIMPLEMENTED_INTRINSIC(UnsafePutVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafePutObject)
UNIMPLEMENTED_INTRINSIC(UnsafePutObjectOrdered)
UNIMPLEMENTED_INTRINSIC(UnsafePutObjectVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafePutLong)
UNIMPLEMENTED_INTRINSIC(UnsafePutLongOrdered)
UNIMPLEMENTED_INTRINSIC(UnsafePutLongVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeCASInt)
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)
UNIMPLEMENTED_INTRINSIC(UnsafeCASObject)
UNIMPLEMENTED_INTRINSIC(StringCharAt)
UNIMPLEMENTED_INTRINSIC(StringCompareTo)
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(StringNewStringFromBytes)
UNIMPLEMENTED_INTRINSIC(StringNewStringFromChars)
UNIMPLEMENTED_INTRINSIC(StringNewStringFromString)

UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)

UNIMPLEMENTED_INTRINSIC(MathCos)
UNIMPLEMENTED_INTRINSIC(MathSin)
UNIMPLEMENTED_INTRINSIC(MathAcos)
UNIMPLEMENTED_INTRINSIC(MathAsin)
UNIMPLEMENTED_INTRINSIC(MathAtan)
UNIMPLEMENTED_INTRINSIC(MathAtan2)
UNIMPLEMENTED_INTRINSIC(MathCbrt)
UNIMPLEMENTED_INTRINSIC(MathCosh)
UNIMPLEMENTED_INTRINSIC(MathExp)
UNIMPLEMENTED_INTRINSIC(MathExpm1)
UNIMPLEMENTED_INTRINSIC(MathHypot)
UNIMPLEMENTED_INTRINSIC(MathLog)
UNIMPLEMENTED_INTRINSIC(MathLog10)
UNIMPLEMENTED_INTRINSIC(MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)
#undef UNIMPLEMENTED_INTRINSIC
1014
1015#undef __
1016
1017} // namespace mips
1018} // namespace art