/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::InputRegisterAt;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke)
      : SlowPathCodeARM64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM64 slow path.  Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM64 for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM64 for HInvokeVirtual).  So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}
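// Fmov between a core register and an FP register copies the raw bit pattern, which is
// exactly the semantics of Double.doubleToRawLongBits/longBitsToDouble and their Float
// counterparts.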
static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
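// Byte reversal: Rev16 swaps the two bytes of a short (Sxth then re-sign-extends the result);
// Rev swaps all bytes of a 32- or 64-bit register for int and long.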
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}
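// ARM64 has no count-trailing-zeros instruction, so reverse the bits (Rbit) and count
// the leading zeros (Clz) of the reversed value instead.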
static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}
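// There is no scalar popcount instruction on ARM64: the value is moved into a SIMD
// register, Cnt computes a per-byte population count, Addv sums the byte lanes, and the
// result is moved back to a core register.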
static void GenBitCount(HInvoke* instr, Primitive::Type type, vixl::MacroAssembler* masm) {
  DCHECK(Primitive::IsIntOrLongType(type)) << type;
  DCHECK_EQ(instr->GetType(), Primitive::kPrimInt);
  DCHECK_EQ(Primitive::PrimitiveKind(instr->InputAt(0)->GetType()), type);

  UseScratchRegisterScope temps(masm);

  Register src = InputRegisterAt(instr, 0);
  Register dst = RegisterFrom(instr->GetLocations()->Out(), type);
  FPRegister fpr = (type == Primitive::kPrimLong) ? temps.AcquireD() : temps.AcquireS();

  __ Fmov(fpr, src);
  __ Cnt(fpr.V8B(), fpr.V8B());
  __ Addv(fpr.B(), fpr.V8B());
  __ Fmov(dst, fpr);
}

void IntrinsicLocationsBuilderARM64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimInt, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
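// Integer abs: compare against zero and conditionally negate (Cneg) when the input is negative.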
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
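// Fmin/Fmax provide the semantics Math.min/max require (NaN operands propagate and
// -0.0 is ordered below +0.0), so a single instruction suffices.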
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(
      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}
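// Integer min/max: compare the two operands and let Csel pick the appropriate one.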
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
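  // Frintn rounds to nearest with ties to even, matching Math.rint semantics.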
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}
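// Math.round is generated as floor(in + 0.5): add 0.5 and convert with Fcvtms, which
// rounds towards minus infinity. It is only emitted when kRoundIsPlusPointFive holds
// (see intrinsics.h).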
static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}
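// Unsafe get: volatile loads go through LoadAcquire (load-acquire, ldar) while plain
// loads use a normal load; reference loads additionally go through the read barrier
// machinery when it is enabled.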
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Location base_loc = locations->InAt(1);
  Register base = WRegisterFrom(base_loc);      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);  // Long offset.
  Location trg_loc = locations->Out();
  Register trg = RegisterFrom(trg_loc, type);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // UnsafeGetObject/UnsafeGetObjectVolatile with Baker's read barrier case.
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireW();
    codegen->GenerateArrayLoadWithBakerReadBarrier(
        invoke, trg_loc, base, 0U, offset_loc, temp, /* needs_null_check */ false);
  } else {
    // Other cases.
    MemOperand mem_op(base.X(), offset);
    if (is_volatile) {
      codegen->LoadAcquire(invoke, trg, mem_op, /* needs_null_check */ true);
    } else {
      codegen->Load(type, trg, mem_op);
    }

    if (type == Primitive::kPrimNot) {
      DCHECK(trg.IsW());
      codegen->MaybeGenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
    }
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
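// Unsafe put: poison the reference first if heap poisoning is enabled, use a
// store-release for volatile and ordered writes, and mark the GC card after any
// reference store.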
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (is_volatile || is_ordered) {
      codegen->StoreRelease(type, source, mem_op);
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);
}
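// Compare-and-swap is implemented as a load-exclusive / store-exclusive (Ldaxr/Stlxr)
// retry loop: retry while the exclusive store fails, and exit as soon as the loaded
// value no longer matches the expected one.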
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());               // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));            // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));          // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);   // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);      // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                          // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);          // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
  __ Bind(&loop_head);
  // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
  // the reference stored in the object before attempting the CAS,
  // similar to the one in the art::Unsafe_compareAndSwapObject JNI
  // implementation.
  //
  // Note that this code is not (yet) used when read barriers are
  // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
  DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
  __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
  __ Cmp(tmp_value, expected);
  __ B(&exit_loop, ne);
  __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
  __ Cbnz(tmp_32, &loop_head);
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not unpoison `value`, as it is the same register as
      // `expected`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value);
    }
  }
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCAS below).
  //
  // TODO(rpl): Fix this issue and re-enable this intrinsic with read barriers.
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());
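  // The comparison itself is delegated to the pStringCompareTo quick entrypoint:
  // load its address from the thread register and branch-and-link to it.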
1197 __ Ldr(
1198 lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
1199 __ Blr(lr);
1200 __ Bind(slow_path->GetExitLabel());
1201}
1202
void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ Cbz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Ldr(temp, MemOperand(str.X(), class_offset));
  __ Ldr(temp1, MemOperand(arg.X(), class_offset));
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal, return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

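  // Promote the temporaries to X registers: temp1 is used as a byte offset and temp2 holds the
  // 64-bit chunks loaded in the comparison loop below.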
  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
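  // 4 characters (8 bytes) have been compared; decrement the remaining length and loop while
  // characters are left.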
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

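  // Call the pIndexOf entrypoint: it takes the string, the code point, and the start index in
  // the first three runtime calling-convention registers and returns the index as an int32_t.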
  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

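  // The byte array is non-null here; call the pAllocStringFromBytes entrypoint to allocate and
  // fill the new string, and record the PC for this runtime call.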
  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

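  // The source string is non-null here; call the pAllocStringFromString entrypoint to allocate
  // the copy, and record the PC for this runtime call.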
  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(1)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void GenFPToFPCall(HInvoke* invoke,
                          vixl::MacroAssembler* masm,
                          CodeGeneratorARM64* codegen,
                          QuickEntrypointEnum entry) {
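  // The FP inputs and the result already sit in the runtime calling-convention registers (see the
  // Create*CallLocations helpers above), so just branch to the entrypoint and record the PC.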
  __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64WordSize>(entry).Int32Value()));
  __ Blr(lr);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), the arguments are not known to be constant, so we
  // use registers rather than constants.
  Register srcObj = XRegisterFrom(locations->InAt(0));
  Register srcBegin = XRegisterFrom(locations->InAt(1));
  Register srcEnd = XRegisterFrom(locations->InAt(2));
  Register dstObj = XRegisterFrom(locations->InAt(3));
  Register dstBegin = XRegisterFrom(locations->InAt(4));

  Register src_ptr = XRegisterFrom(locations->GetTemp(0));
  Register src_ptr_end = XRegisterFrom(locations->GetTemp(1));

  UseScratchRegisterScope temps(masm);
  Register dst_ptr = temps.AcquireX();
  Register tmp = temps.AcquireW();

  // src range to copy.
  __ Add(src_ptr, srcObj, Operand(value_offset));
  __ Add(src_ptr_end, src_ptr, Operand(srcEnd, LSL, 1));
  __ Add(src_ptr, src_ptr, Operand(srcBegin, LSL, 1));

  // dst to be copied.
  __ Add(dst_ptr, dstObj, Operand(data_offset));
  __ Add(dst_ptr, dst_ptr, Operand(dstBegin, LSL, 1));

  // Do the copy.
  vixl::Label loop, done;
  __ Bind(&loop);
  __ Cmp(src_ptr, src_ptr_end);
  __ B(&done, eq);
  __ Ldrh(tmp, MemOperand(src_ptr, char_size, vixl::PostIndex));
  __ Strh(tmp, MemOperand(dst_ptr, char_size, vixl::PostIndex));
  __ B(&loop);
  __ Bind(&done);
}

// Mirrors ARRAYCOPY_SHORT_CHAR_ARRAY_THRESHOLD in libcore, so we can choose to use the native
// implementation there for longer copy lengths.
static constexpr int32_t kSystemArrayCopyCharThreshold = 32;

static void SetSystemArrayCopyLocationRequires(LocationSummary* locations,
                                               uint32_t at,
                                               HInstruction* input) {
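  // Positions and lengths whose constant values encode as an Add/Sub immediate can stay as
  // constants; anything else has to be materialized in a register.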
  HIntConstant* const_input = input->AsIntConstant();
  if (const_input != nullptr && !vixl::Assembler::IsImmAddSub(const_input->GetValue())) {
    locations->SetInAt(at, Location::RequiresRegister());
  } else {
    locations->SetInAt(at, Location::RegisterOrConstant(input));
  }
}

void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // Check to see if we have known failures that will cause us to have to bail out
  // to the runtime, and just generate the runtime call directly.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dst_pos = invoke->InputAt(3)->AsIntConstant();

  // The positions must be non-negative.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dst_pos != nullptr && dst_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // The length must be >= 0 and not so long that we would (currently) prefer libcore's
  // native implementation.
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0 || len > kSystemArrayCopyCharThreshold) {
      // Just call as normal.
      return;
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena();
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // arraycopy(char[] src, int src_pos, char[] dst, int dst_pos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1));
  locations->SetInAt(2, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3));
  SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

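// Emit the range checks for one side of an arraycopy: the position must lie within the array and
// at least `length` elements must remain after it. Any failure branches to the slow path.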
static void CheckSystemArrayCopyPosition(vixl::MacroAssembler* masm,
                                         const Location& pos,
                                         const Register& input,
                                         const Location& length,
                                         SlowPathCodeARM64* slow_path,
                                         const Register& input_len,
                                         const Register& temp,
                                         bool length_is_input_length = false) {
  const int32_t length_offset = mirror::Array::LengthOffset().Int32Value();
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ Ldr(temp, MemOperand(input, length_offset));
        __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
        __ B(slow_path->GetEntryLabel(), lt);
      }
    } else {
      // Check that length(input) >= pos.
      __ Ldr(input_len, MemOperand(input, length_offset));
      __ Subs(temp, input_len, pos_const);
      __ B(slow_path->GetEntryLabel(), lt);

      // Check that (length(input) - pos) >= length.
      __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
      __ B(slow_path->GetEntryLabel(), lt);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    __ Cbnz(WRegisterFrom(pos), slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = WRegisterFrom(pos);
    __ Tbnz(pos_reg, pos_reg.size() - 1, slow_path->GetEntryLabel());

    // Check that pos <= length(input) && (length(input) - pos) >= length.
    __ Ldr(temp, MemOperand(input, length_offset));
    __ Subs(temp, temp, pos_reg);
    // Ccmp if length(input) >= pos, else definitely bail to slow path (N!=V == lt).
    __ Ccmp(temp, OperandFrom(length, Primitive::kPrimInt), NFlag, ge);
    __ B(slow_path->GetEntryLabel(), lt);
  }
}

// Compute base source address, base destination address, and end source address
// for System.arraycopy* intrinsics.
static void GenSystemArrayCopyAddresses(vixl::MacroAssembler* masm,
                                        Primitive::Type type,
                                        const Register& src,
                                        const Location& src_pos,
                                        const Register& dst,
                                        const Location& dst_pos,
                                        const Location& copy_length,
                                        const Register& src_base,
                                        const Register& dst_base,
                                        const Register& src_end) {
  DCHECK(type == Primitive::kPrimNot || type == Primitive::kPrimChar)
      << "Unexpected element type: "
      << type;
  const int32_t char_size = Primitive::ComponentSize(type);
  const int32_t char_size_shift = Primitive::ComponentSizeShift(type);

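  // Each base is array + data offset + pos * element size; src_end additionally adds
  // length * element size so the copy loop can compare against it.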
  uint32_t offset = mirror::Array::DataOffset(char_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_base, src, char_size * constant + offset);
  } else {
    __ Add(src_base, src, offset);
    __ Add(src_base,
           src_base,
           Operand(XRegisterFrom(src_pos), LSL, char_size_shift));
  }

  if (dst_pos.IsConstant()) {
    int32_t constant = dst_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(dst_base, dst, char_size * constant + offset);
  } else {
    __ Add(dst_base, dst, offset);
    __ Add(dst_base,
           dst_base,
           Operand(XRegisterFrom(dst_pos), LSL, char_size_shift));
  }

  if (copy_length.IsConstant()) {
    int32_t constant = copy_length.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_end, src_base, char_size * constant);
  } else {
    __ Add(src_end,
           src_base,
           Operand(XRegisterFrom(copy_length), LSL, char_size_shift));
  }
}

void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();
  Register src = XRegisterFrom(locations->InAt(0));
  Location src_pos = locations->InAt(1);
  Register dst = XRegisterFrom(locations->InAt(2));
  Location dst_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  // If source and destination are the same, take the slow path. Overlapping copy regions must be
  // copied in reverse, and we can't know in all cases whether that is needed.
  __ Cmp(src, dst);
  __ B(slow_path->GetEntryLabel(), eq);

  // Bail out if the source is null.
  __ Cbz(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Cbz(dst, slow_path->GetEntryLabel());

  if (!length.IsConstant()) {
    // If the length is negative, bail out.
    __ Tbnz(WRegisterFrom(length), kWRegSize - 1, slow_path->GetEntryLabel());
    // If the length is greater than 32, (currently) prefer libcore's native implementation.
    __ Cmp(WRegisterFrom(length), kSystemArrayCopyCharThreshold);
    __ B(slow_path->GetEntryLabel(), gt);
  } else {
    // We have already checked in the LocationsBuilder for the constant case.
    DCHECK_GE(length.GetConstant()->AsIntConstant()->GetValue(), 0);
    DCHECK_LE(length.GetConstant()->AsIntConstant()->GetValue(), 32);
  }

  Register src_curr_addr = WRegisterFrom(locations->GetTemp(0));
  Register dst_curr_addr = WRegisterFrom(locations->GetTemp(1));
  Register src_stop_addr = WRegisterFrom(locations->GetTemp(2));

  CheckSystemArrayCopyPosition(masm,
                               src_pos,
                               src,
                               length,
                               slow_path,
                               src_curr_addr,
                               dst_curr_addr,
                               /* length_is_input_length */ false);

  CheckSystemArrayCopyPosition(masm,
                               dst_pos,
                               dst,
                               length,
                               slow_path,
                               src_curr_addr,
                               dst_curr_addr,
                               /* length_is_input_length */ false);

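  // The position checks above used the temps as W registers; promote them to X registers since
  // they now hold 64-bit addresses for the copy loop.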
  src_curr_addr = src_curr_addr.X();
  dst_curr_addr = dst_curr_addr.X();
  src_stop_addr = src_stop_addr.X();

  GenSystemArrayCopyAddresses(masm,
                              Primitive::kPrimChar,
                              src,
                              src_pos,
                              dst,
                              dst_pos,
                              length,
                              src_curr_addr,
                              dst_curr_addr,
                              src_stop_addr);

  // Iterate over the arrays and do a raw copy of the chars.
  const int32_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  UseScratchRegisterScope temps(masm);
  Register tmp = temps.AcquireW();
  vixl::Label loop, done;
  __ Bind(&loop);
  __ Cmp(src_curr_addr, src_stop_addr);
  __ B(&done, eq);
  __ Ldrh(tmp, MemOperand(src_curr_addr, char_size, vixl::PostIndex));
  __ Strh(tmp, MemOperand(dst_curr_addr, char_size, vixl::PostIndex));
  __ B(&loop);
  __ Bind(&done);

  __ Bind(slow_path->GetExitLabel());
}

// For longer copy lengths we prefer to fall back to libcore's native implementation.
static constexpr int32_t kSystemArrayCopyThreshold = 128;

// CodeGenerator::CreateSystemArrayCopyLocationSummary uses three temporary registers.
// We want to use two temporary registers in order to reduce register pressure on arm64.
// So we don't use CodeGenerator::CreateSystemArrayCopyLocationSummary.
void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopy(HInvoke* invoke) {
  // Check to see if we have known failures that will cause us to have to bail out
  // to the runtime, and just generate the runtime call directly.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();

  // The positions must be non-negative.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // The length must be >= 0.
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0 || len >= kSystemArrayCopyThreshold) {
      // Just call as normal.
      return;
    }
  }

  SystemArrayCopyOptimizations optimizations(invoke);

  if (optimizations.GetDestinationIsSource()) {
    if (src_pos != nullptr && dest_pos != nullptr && src_pos->GetValue() < dest_pos->GetValue()) {
      // We only support backward copying if source and destination are the same.
      return;
    }
  }

  if (optimizations.GetDestinationIsPrimitiveArray() || optimizations.GetSourceIsPrimitiveArray()) {
    // We currently don't intrinsify primitive copying.
    return;
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena();
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // arraycopy(Object src, int src_pos, Object dest, int dest_pos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1));
  locations->SetInAt(2, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3));
  SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopy(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  Register src = XRegisterFrom(locations->InAt(0));
  Location src_pos = locations->InAt(1);
  Register dest = XRegisterFrom(locations->InAt(2));
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  vixl::Label conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  if (!optimizations.GetDestinationIsSource() &&
      (!src_pos.IsConstant() || !dest_pos.IsConstant())) {
    __ Cmp(src, dest);
  }
  // If source and destination are the same, we go to slow path if we need to do
  // forward copying.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ B(&conditions_on_positions_validated, ne);
      }
      __ Cmp(WRegisterFrom(dest_pos), src_pos_constant);
      __ B(slow_path->GetEntryLabel(), gt);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ B(&conditions_on_positions_validated, ne);
    }
    __ Cmp(RegisterFrom(src_pos, invoke->InputAt(1)->GetType()),
           OperandFrom(dest_pos, invoke->InputAt(3)->GetType()));
    __ B(slow_path->GetEntryLabel(), lt);
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ Cbz(src, slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ Cbz(dest, slow_path->GetEntryLabel());
  }

  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    // If the length is negative, bail out.
    __ Tbnz(WRegisterFrom(length), kWRegSize - 1, slow_path->GetEntryLabel());
    // If the length is >= 128, (currently) prefer the native implementation.
    __ Cmp(WRegisterFrom(length), kSystemArrayCopyThreshold);
    __ B(slow_path->GetEntryLabel(), ge);
  }
  // Validity checks: source.
  CheckSystemArrayCopyPosition(masm,
                               src_pos,
                               src,
                               length,
                               slow_path,
                               temp1,
                               temp2,
                               optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckSystemArrayCopyPosition(masm,
                               dest_pos,
                               dest,
                               length,
                               slow_path,
                               temp1,
                               temp2,
                               optimizations.GetCountIsDestinationLength());
  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);
    Register temp3 = temps.AcquireW();
    if (!optimizations.GetDoesNotNeedTypeCheck()) {
      // Check whether all elements of the source array are assignable to the component
      // type of the destination array. We do two checks: the classes are the same,
      // or the destination is Object[]. If none of these checks succeed, we go to the
      // slow path.
      __ Ldr(temp1, MemOperand(dest, class_offset));
      __ Ldr(temp2, MemOperand(src, class_offset));
      bool did_unpoison = false;
      if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
          !optimizations.GetSourceIsNonPrimitiveArray()) {
        // One or two of the references need to be unpoisoned. Unpoison them
        // both to make the identity check valid.
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp2);
        did_unpoison = true;
      }

      if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        // Bail out if the destination is not a non-primitive array.
        // /* HeapReference<Class> */ temp3 = temp1->component_type_
        __ Ldr(temp3, HeapOperand(temp1, component_offset));
        __ Cbz(temp3, slow_path->GetEntryLabel());
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
        __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ Cbnz(temp3, slow_path->GetEntryLabel());
      }

      if (!optimizations.GetSourceIsNonPrimitiveArray()) {
        // Bail out if the source is not a non-primitive array.
        // /* HeapReference<Class> */ temp3 = temp2->component_type_
        __ Ldr(temp3, HeapOperand(temp2, component_offset));
        __ Cbz(temp3, slow_path->GetEntryLabel());
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
        __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ Cbnz(temp3, slow_path->GetEntryLabel());
      }

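      // Fast path: the source and destination classes are identical. Otherwise the copy can only
      // succeed if the destination is Object[] (its component type's super class is null).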
      __ Cmp(temp1, temp2);

      if (optimizations.GetDestinationIsTypedObjectArray()) {
        vixl::Label do_copy;
        __ B(&do_copy, eq);
        if (!did_unpoison) {
          codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        }
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ Ldr(temp1, HeapOperand(temp1, component_offset));
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        // /* HeapReference<Class> */ temp1 = temp1->super_class_
        __ Ldr(temp1, HeapOperand(temp1, super_offset));
        // No need to unpoison the result, we're comparing against null.
        __ Cbnz(temp1, slow_path->GetEntryLabel());
        __ Bind(&do_copy);
      } else {
        __ B(slow_path->GetEntryLabel(), ne);
      }
    } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
      // Bail out if the source is not a non-primitive array.
      // /* HeapReference<Class> */ temp1 = src->klass_
      __ Ldr(temp1, HeapOperand(src.W(), class_offset));
      codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp3 = temp1->component_type_
      __ Ldr(temp3, HeapOperand(temp1, component_offset));
      __ Cbz(temp3, slow_path->GetEntryLabel());
      codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
      __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(temp3, slow_path->GetEntryLabel());
    }

    Register src_curr_addr = temp1.X();
    Register dst_curr_addr = temp2.X();
    Register src_stop_addr = temp3.X();

    GenSystemArrayCopyAddresses(masm,
                                Primitive::kPrimNot,
                                src,
                                src_pos,
                                dest,
                                dest_pos,
                                length,
                                src_curr_addr,
                                dst_curr_addr,
                                src_stop_addr);

    // Iterate over the arrays and do a raw copy of the objects. We don't need to
    // poison/unpoison, nor do any read barrier as the next uses of the destination
    // array will do it.
    vixl::Label loop, done;
    const int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
    __ Bind(&loop);
    __ Cmp(src_curr_addr, src_stop_addr);
    __ B(&done, eq);
    {
      Register tmp = temps.AcquireW();
      __ Ldr(tmp, MemOperand(src_curr_addr, element_size, vixl::PostIndex));
      __ Str(tmp, MemOperand(dst_curr_addr, element_size, vixl::PostIndex));
    }
    __ B(&loop);
    __ Bind(&done);
  }
  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(dest.W(), Register(), /* value_can_be_null */ false);

  __ Bind(slow_path->GetExitLabel());
}

UNIMPLEMENTED_INTRINSIC(ARM64, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(ARM64, FloatIsInfinite)
UNIMPLEMENTED_INTRINSIC(ARM64, DoubleIsInfinite)
UNIMPLEMENTED_INTRINSIC(ARM64, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, LongLowestOneBit)

// 1.8.
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM64)

#undef __

}  // namespace arm64
}  // namespace art