/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
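      // Move the pair out of R0/R1 without clobbering a result register before
      // it has been read: if the target low register aliases R1, copy the high
      // half first; otherwise copy low then high.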
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM* codegen) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(kArtMethodRegister));
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};

#undef __

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

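  // Branchless abs: mask = in >> 31 is all ones for a negative input and zero
  // otherwise, so abs(in) = (in + mask) ^ mask. The 64-bit variant applies the
  // same identity across the register pair, using adds/adc to propagate the carry.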
  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

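  // Branch-free min/max: an IT-else block keeps op1 when the comparison favors
  // it and moves op2 into out otherwise.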
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 1. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
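  // If the low output register aliases addr, load the high word first so the
  // address is still intact for the second load.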
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 1. Then unaligned accesses throw a processor
  // exception. So we can't use strd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.

  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
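      // No single-copy atomic ldrd on this core: ldrexd performs the 64-bit
      // load atomically; the exclusive monitor state it opens is simply unused.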
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register trg = locations->Out().AsRegister<Register>();
    __ MaybeUnpoisonHeapReference(trg);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

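  // A dmb ahead of the store gives the put release semantics; volatile puts
  // additionally get the trailing barrier emitted below.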
  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

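      // Without atomic strd, the 64-bit store must go through an ldrexd/strexd
      // loop to stay single-copy atomic; strexd writes 0 to temp_lo on success.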
      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    codegen->GetAssembler()->PoisonHeapReference(value_lo);
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp == 0;

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

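  // Materialize the boolean result: tmp_lo is 0 only on success, so rsbs
  // computes out = 1 - tmp_lo; when the subtraction borrows (tmp_lo > 1) the
  // carry is clear and the CC move forces out to 0.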
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
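  // Unsigned comparison: a negative index becomes a large unsigned value, so a
  // single CS (unsigned >=) branch covers both idx < 0 and idx >= length.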
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
    __ cmp(char_reg, ShifterOperand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HI);
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                 \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                     \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm
}  // namespace art