/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

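// Slow path shared by the intrinsics below: it re-issues the invoke as a
// regular call, moving arguments according to the ARM dex calling convention.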
using IntrinsicSlowPathARM = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>;

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM slow path. Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM for HInvokeVirtual). So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ assembler->
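// In the code generators below, `__ op(...)` expands to `assembler->op(...)`.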

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

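// The Double/Float raw-bits intrinsics below reduce to plain vmov moves
// between core and FPU registers; no memory round trip is involved.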
static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

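// Long.numberOfLeadingZeros: clz the high word first; when the high word is
// zero, the answer is 32 plus the clz of the low word.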
static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Label end;
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

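// ARM has no count-trailing-zeros instruction, so the sequence below computes
// clz(rbit(x)); for longs the low word is examined first, mirroring the
// leading-zeros code above.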
static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     ArmAssembler* assembler) {
  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  Register out = locations->Out().AsRegister<Register>();

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Label end;
    __ rbit(out, in_reg_lo);
    __ clz(out, out);
    __ CompareAndBranchIfNonZero(in_reg_lo, &end);
    __ rbit(out, in_reg_hi);
    __ clz(out, out);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    __ rbit(out, in);
    __ clz(out, out);
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

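// Branchless integer abs: mask = in >> 31 (all ones when the input is
// negative), then out = (in + mask) ^ mask. The 64-bit variant carries the
// add across the register pair (adds/adc) before applying the eors.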
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

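// Integer min/max without a branch: one compare, then an IT (if-then) block
// selects op1 or op2 into the output under complementary conditions.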
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use strd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

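// Unsafe.get*: only the low word of the 64-bit offset is used (heap
// references are 32-bit here). Volatile loads are followed by a DMB ISH
// barrier, and reference loads go through the read barrier machinery when
// it is enabled.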
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = codegen->GetAssembler();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();             // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Location trg_loc = locations->Out();

  switch (type) {
    case Primitive::kPrimInt: {
      Register trg = trg_loc.AsRegister<Register>();
      __ ldr(trg, Address(base, offset));
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    case Primitive::kPrimNot: {
      Register trg = trg_loc.AsRegister<Register>();
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(
              invoke, trg_loc, base, 0U, offset_loc, TIMES_1, temp, /* needs_null_check */ false);
          if (is_volatile) {
            __ dmb(ISH);
          }
        } else {
          __ ldr(trg, Address(base, offset));
          if (is_volatile) {
            __ dmb(ISH);
          }
          codegen->GenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
        }
      } else {
        __ ldr(trg, Address(base, offset));
        if (is_volatile) {
          __ dmb(ISH);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
      __ add(IP, base, ShifterOperand(offset));
      if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
        Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
        __ ldrexd(trg_lo, trg_hi, IP);
      } else {
        __ ldrd(trg_lo, Address(IP));
      }
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
                                          HInvoke* invoke,
                                          Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorARM::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}

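// Unsafe.put*: volatile and ordered stores are preceded by a DMB ISH barrier,
// and volatile stores get a trailing one as well. Volatile 64-bit stores use
// an ldrexd/strexd loop when the core lacks atomic ldrd/strd; reference
// stores finish by marking the GC card.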
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke,
                                                Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
}

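// Compare-and-swap via an ldrex/strex retry loop bracketed by full DMB ISH
// barriers; the loop's pseudo-code is spelled out in comments inside GenCas.
// The object variant is currently disabled when read barriers are in use
// (see VisitUnsafeCASObject below).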
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  // Emit a DMB ISH instruction instead of a DMB ISHST one, as the
  // latter allows a preceding load to be delayed past the STXR
  // instruction below.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not poison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value_lo);
    }
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp != 0;

  Label loop_head;
  __ Bind(&loop_head);

  // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
  // the reference stored in the object before attempting the CAS,
  // similar to the one in the art::Unsafe_compareAndSwapObject JNI
  // implementation.
  //
  // Note that this code is not (yet) used when read barriers are
  // enabled (see IntrinsicLocationsBuilderARM::VisitUnsafeCASObject).
  DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not unpoison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    }
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCas above).
  //
  // TODO(rpl): Fix this issue and re-enable this intrinsic with read barriers.
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

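// String.charAt: an unsigned compare (CS) of the index against the length
// field also catches negative indices; out-of-range indices take the slow
// path, otherwise the 16-bit character is loaded from the string data at
// 2 * index.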
void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  // the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  // we will not optimize the code for constants (which would save a register).

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

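// String.compareTo: after an identity shortcut, the length difference is the
// provisional result; the unrolled loop then compares character data one
// word (two chars) at a time, twice per iteration, and on a mismatch
// isolates the differing character with eor/rbit/clz.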
void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus three temps.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            invoke->InputAt(1)->CanBeNull()
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp0 = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label find_char_diff;
  Label end;

  // Get offsets of count and value fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Take slow path and throw if input can be and is null.
  SlowPathCode* slow_path = nullptr;
  const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
  if (can_slow_path) {
    slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
    codegen_->AddSlowPath(slow_path);
    __ CompareAndBranchIfZero(arg, slow_path->GetEntryLabel());
  }

  // Reference equality check, return 0 if same reference.
  __ subs(out, str, ShifterOperand(arg));
  __ b(&end, EQ);
  // Load lengths of this and argument strings.
  __ ldr(temp2, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // out = length diff.
  __ subs(out, temp2, ShifterOperand(temp1));
  // temp0 = min(len(str), len(arg)).
  __ it(Condition::LT, kItElse);
  __ mov(temp0, ShifterOperand(temp2), Condition::LT);
  __ mov(temp0, ShifterOperand(temp1), Condition::GE);
  // Shorter string is empty?
  __ CompareAndBranchIfZero(temp0, &end);

  // Store offset of string value in preparation for comparison loop.
  __ mov(temp1, ShifterOperand(value_offset));

  // Assertions that must hold in order to compare multiple characters at a time.
  CHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment),
                "String data must be 8-byte aligned for unrolled CompareTo loop.");

  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Unrolled loop comparing 4x16-bit chars per iteration (ok because of string data alignment).
  __ Bind(&loop);
  __ ldr(IP, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(IP, ShifterOperand(temp2));
  __ b(&find_char_diff, NE);
  __ add(temp1, temp1, ShifterOperand(char_size * 2));
  __ sub(temp0, temp0, ShifterOperand(2));

  __ ldr(IP, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(IP, ShifterOperand(temp2));
  __ b(&find_char_diff, NE);
  __ add(temp1, temp1, ShifterOperand(char_size * 2));
  __ subs(temp0, temp0, ShifterOperand(2));

  __ b(&loop, GT);
  __ b(&end);

  // Find the single 16-bit character difference.
  __ Bind(&find_char_diff);
  // Get the bit position of the first character that differs.
  __ eor(temp1, temp2, ShifterOperand(IP));
  __ rbit(temp1, temp1);
  __ clz(temp1, temp1);

  // temp0 = number of 16-bit characters remaining to compare.
  // (it could be < 1 if a difference is found after the first SUB in the comparison loop, and
  // after the end of the shorter string data).

  // (temp1 >> 4) = character where difference occurs between the last two words compared, on the
  // interval [0,1] (0 for low half-word different, 1 for high half-word different).

  // If temp0 <= (temp1 >> 4), the difference occurs outside the remaining string data, so just
  // return length diff (out).
  __ cmp(temp0, ShifterOperand(temp1, LSR, 4));
  __ b(&end, LE);
  // Extract the characters and calculate the difference.
  __ bic(temp1, temp1, ShifterOperand(0xf));
  __ Lsr(temp2, temp2, temp1);
  __ Lsr(IP, IP, temp1);
  __ movt(temp2, 0);
  __ movt(IP, 0);
  __ sub(out, IP, ShifterOperand(temp2));

  __ Bind(&end);

  if (can_slow_path) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  // Using instruction cbz requires a low register, so explicitly set a temp to be R0.
  locations->AddTemp(Location::RegisterLocation(R0));
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ CompareAndBranchIfZero(arg, &return_false);
  }

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ ldr(temp, Address(str, class_offset));
    __ ldr(temp1, Address(arg, class_offset));
    __ cmp(temp, ShifterOperand(temp1));
    __ b(&return_false, NE);
  }

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String data must be aligned for fast compare.");

  __ LoadImmediate(temp1, value_offset);

  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) / sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}

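// Common code for String.indexOf: constant code points above 0xFFFF always
// take the slow path, and non-constant non-char arguments are range-checked
// at runtime. The search itself is delegated to the pIndexOf runtime
// entrypoint, which takes the start index in R2 (hence the explicit zero
// when start_at_zero is set).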
static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit the slow path for a small constant
  // or a char.
  SlowPathCode* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
            std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    // 0xffff is not a modified immediate but 0x10000 is, so use `>= 0x10000` instead of
    // `> 0xffff`.
    __ cmp(char_reg,
           ShifterOperand(static_cast<uint32_t>(std::numeric_limits<uint16_t>::max()) + 1));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HS);
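    // HS is an unsigned >=, so negative code points (huge when reinterpreted as
    // unsigned) also take the slow path.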
  }

  if (start_at_zero) {
    Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitSystemArrayCopy(HInvoke* invoke) {
  CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
  LocationSummary* locations = invoke->GetLocations();
  if (locations == nullptr) {
    return;
  }

  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) {
    locations->SetInAt(3, Location::RequiresRegister());
  }
  if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) {
    locations->SetInAt(4, Location::RequiresRegister());
  }
}

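// Emits the System.arraycopy position checks for one array: verifies that
// `0 <= pos <= length(input)` and `length(input) - pos >= length`, branching to
// `slow_path` when a check fails. Checks that are statically known to hold are
// elided when `pos` is a constant or when `length_is_input_length` is set.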
static void CheckPosition(ArmAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, temp, input, length_offset);
        if (length.IsConstant()) {
          __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
        }
        __ b(slow_path->GetEntryLabel(), LT);
      }
    } else {
      // Check that length(input) >= pos.
      __ LoadFromOffset(kLoadWord, input_len, input, length_offset);
      __ subs(temp, input_len, ShifterOperand(pos_const));
      __ b(slow_path->GetEntryLabel(), LT);

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
      }
      __ b(slow_path->GetEntryLabel(), LT);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ cmp(pos_reg, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that pos <= length(input).
    __ LoadFromOffset(kLoadWord, temp, input, length_offset);
    __ subs(temp, temp, ShifterOperand(pos_reg));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that (length(input) - pos) >= length.
    if (length.IsConstant()) {
      __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }
}

// TODO: Implement read barriers in the SystemArrayCopy intrinsic.
// Note that this code path is not used (yet) because we do not
// intrinsify methods that can go into the IntrinsicSlowPathARM
// slow path.
void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  Label conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  if (!optimizations.GetDestinationIsSource() &&
      (!src_pos.IsConstant() || !dest_pos.IsConstant())) {
    __ cmp(src, ShifterOperand(dest));
  }
  // If source and destination are the same array, we go to the slow path whenever
  // dest_pos > src_pos: the raw copy loop below runs forward, so it would overwrite
  // source elements before reading them.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ b(&conditions_on_positions_validated, NE);
      }
      __ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant));
      __ b(slow_path->GetEntryLabel(), GT);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ b(&conditions_on_positions_validated, NE);
    }
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos_constant));
    } else {
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ CompareAndBranchIfZero(src, slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ CompareAndBranchIfZero(dest, slow_path->GetEntryLabel());
  }

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    __ cmp(length.AsRegister<Register>(), ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);
  }

  // Validity checks: source.
  CheckPosition(assembler,
                src_pos,
                src,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckPosition(assembler,
                dest_pos,
                dest,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsDestinationLength());

  if (!optimizations.GetDoesNotNeedTypeCheck()) {
    // Check whether all elements of the source array are assignable to the component
    // type of the destination array. We do two checks: the classes are the same,
    // or the destination is Object[]. If neither check succeeds, we go to the
    // slow path.
    __ LoadFromOffset(kLoadWord, temp1, dest, class_offset);
    __ LoadFromOffset(kLoadWord, temp2, src, class_offset);
    bool did_unpoison = false;
    if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
        !optimizations.GetSourceIsNonPrimitiveArray()) {
      // One or two of the references need to be unpoisoned. Unpoison them
      // both to make the identity check valid.
      __ MaybeUnpoisonHeapReference(temp1);
      __ MaybeUnpoisonHeapReference(temp2);
      did_unpoison = true;
    }

    if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
      // Bail out if the destination is not a non-primitive array.
      // /* HeapReference<Class> */ temp3 = temp1->component_type_
      __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      // Bail out if the source is not a non-primitive array.
      // /* HeapReference<Class> */ temp3 = temp2->component_type_
      __ LoadFromOffset(kLoadWord, temp3, temp2, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    __ cmp(temp1, ShifterOperand(temp2));

    if (optimizations.GetDestinationIsTypedObjectArray()) {
      Label do_copy;
      __ b(&do_copy, EQ);
      if (!did_unpoison) {
        __ MaybeUnpoisonHeapReference(temp1);
      }
      // /* HeapReference<Class> */ temp1 = temp1->component_type_
      __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
      __ MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp1 = temp1->super_class_
      __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
      // No need to unpoison the result, we're comparing against null.
      __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel());
      __ Bind(&do_copy);
    } else {
      __ b(slow_path->GetEntryLabel(), NE);
    }
  } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
    DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non-primitive array.
    // /* HeapReference<Class> */ temp1 = src->klass_
    __ LoadFromOffset(kLoadWord, temp1, src, class_offset);
    __ MaybeUnpoisonHeapReference(temp1);
    // /* HeapReference<Class> */ temp3 = temp1->component_type_
    __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
    __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
    __ MaybeUnpoisonHeapReference(temp3);
    __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
    static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
    __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
  }

  // Compute base source address, base destination address, and end source address.

  uint32_t element_size = sizeof(int32_t);
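  // Heap references are 32-bit on this target, hence the int32_t element size and
  // the LSL #2 index scaling below.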
  uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp1, src, element_size * constant + offset);
  } else {
    __ add(temp1, src, ShifterOperand(src_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp1, offset);
  }

  if (dest_pos.IsConstant()) {
    int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp2, dest, element_size * constant + offset);
  } else {
    __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp2, offset);
  }

  if (length.IsConstant()) {
    int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp3, temp1, element_size * constant);
  } else {
    __ add(temp3, temp1, ShifterOperand(length.AsRegister<Register>(), LSL, 2));
  }

  // Iterate over the arrays and do a raw copy of the objects. We don't need to
  // poison/unpoison, nor do any read barrier as the next uses of the destination
  // array will do it.
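  // temp1 and temp2 advance one element per iteration via post-indexed addressing;
  // the loop stops once temp1 reaches temp3, the end of the source range.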
  Label loop, done;
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&done, EQ);
  __ Bind(&loop);
  __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
  __ str(IP, Address(temp2, element_size, Address::PostIndex));
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&loop, NE);
  __ Bind(&done);

  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(temp1,
                       temp2,
                       dest,
                       Register(kNoRegister),
                       /* value_can_be_null */ false);

  __ Bind(slow_path->GetExitLabel());
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->InputAt(1)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
}

static void GenFPToFPCall(HInvoke* invoke,
                          ArmAssembler* assembler,
                          CodeGeneratorARM* codegen,
                          QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmWordSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
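  // vmovrrd splits the double argument out of its D register into the core-register
  // pair the entrypoint expects; vmovdrr moves the result back afterwards.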
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

static void GenFPFPToFPCall(HInvoke* invoke,
                            ArmAssembler* assembler,
                            CodeGeneratorARM* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(2)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(3)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmWordSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ vmovrrd(calling_convention.GetRegisterAt(2),
             calling_convention.GetRegisterAt(3),
             FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

void IntrinsicLocationsBuilderARM::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rbit(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

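  // Bit-reversing a 64-bit value reverses each 32-bit half and swaps the halves,
  // hence the crossed register pairs below.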
  __ rbit(out_reg_lo, in_reg_hi);
  __ rbit(out_reg_hi, in_reg_lo);
}

void IntrinsicLocationsBuilderARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rev(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

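  // Likewise for byte reversal: rev each 32-bit half and swap the halves.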
  __ rev(out_reg_lo, in_reg_hi);
  __ rev(out_reg_hi, in_reg_lo);
}

void IntrinsicLocationsBuilderARM::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitShortReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ revsh(out, in);
}

void IntrinsicLocationsBuilderARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = locations->InAt(0).AsRegister<Register>();
  Register srcBegin = locations->InAt(1).AsRegister<Register>();
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dstObj = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  Register src_ptr = locations->GetTemp(0).AsRegister<Register>();
  Register src_ptr_end = locations->GetTemp(1).AsRegister<Register>();
  Register dst_ptr = locations->GetTemp(2).AsRegister<Register>();
  Register tmp = locations->GetTemp(3).AsRegister<Register>();

  // src range to copy.
  __ add(src_ptr, srcObj, ShifterOperand(value_offset));
  __ add(src_ptr_end, src_ptr, ShifterOperand(srcEnd, LSL, 1));
  __ add(src_ptr, src_ptr, ShifterOperand(srcBegin, LSL, 1));

  // dst to be copied.
  __ add(dst_ptr, dstObj, ShifterOperand(data_offset));
  __ add(dst_ptr, dst_ptr, ShifterOperand(dstBegin, LSL, 1));

  // Do the copy.
  Label loop, done;
  __ Bind(&loop);
  __ cmp(src_ptr, ShifterOperand(src_ptr_end));
  __ b(&done, EQ);
  __ ldrh(tmp, Address(src_ptr, char_size, Address::PostIndex));
  __ strh(tmp, Address(dst_ptr, char_size, Address::PostIndex));
  __ b(&loop);
  __ Bind(&done);
}

void IntrinsicLocationsBuilderARM::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // Shifting left by 1 bit makes the value encodable as an immediate operand;
  // we don't care about the sign bit anyway.
  constexpr uint32_t infinity = kPositiveInfinityFloat << 1U;

  __ vmovrs(out, locations->InAt(0).AsFpuRegister<SRegister>());
  // We don't care about the sign bit, so shift left.
  __ Lsl(out, out, 1);
  __ eor(out, out, ShifterOperand(infinity));
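  // `out` is now zero iff the input was +/-infinity: for instance, +infinity is
  // 0x7f800000, which shifted left by one is 0xff000000 == `infinity`, so the
  // eor clears every bit.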
  // If the result is 0, then it has 32 leading zeros, and less than that otherwise.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}

void IntrinsicLocationsBuilderARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // The highest 32 bits of double precision positive infinity separated into
  // two constants encodable as immediate operands.
  constexpr uint32_t infinity_high = 0x7f000000U;
  constexpr uint32_t infinity_high2 = 0x00f00000U;

  static_assert((infinity_high | infinity_high2) ==
                    static_cast<uint32_t>(kPositiveInfinityDouble >> 32U),
                "The constants do not add up to the high 32 bits of double "
                "precision positive infinity.");
  __ vmovrrd(IP, out, FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ eor(out, out, ShifterOperand(infinity_high));
  __ eor(out, out, ShifterOperand(infinity_high2));
  // We don't care about the sign bit, so shift left.
  __ orr(out, IP, ShifterOperand(out, LSL, 1));
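  // `out` is now zero iff the input was +/-infinity: the high word must equal
  // 0x7ff00000 up to the sign bit (discarded by the LSL #1), and the low word,
  // still in IP, must be all zeros.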
  // If the result is 0, then it has 32 leading zeros, and less than that otherwise.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}

UNIMPLEMENTED_INTRINSIC(ARM, IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(ARM, LongBitCount)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRint)
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(ARM, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ARM, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongLowestOneBit)

// Java 1.8.
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM)

#undef __

}  // namespace arm
}  // namespace art