/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string-inl.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

// TODO(VIXL): Make VIXL compile with -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch64/disasm-aarch64.h"
#include "aarch64/macro-assembler-aarch64.h"
#pragma GCC diagnostic pop

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::InputRegisterAt;
using helpers::OutputRegister;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetVIXLAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetVIXLAssembler()->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke)
      : SlowPathCodeARM64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    {
      // Ensure that between the BLR (emitted by Generate*Call) and RecordPcInfo there
      // are no pools emitted.
      vixl::EmissionCheckScope guard(codegen->GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
      if (invoke_->IsInvokeStaticOrDirect()) {
        codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                            LocationFrom(kArtMethodRegister));
      } else {
        codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
      }
      codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierSystemArrayCopySlowPathARM64(HInstruction* instruction, Location tmp)
      : SlowPathCodeARM64(instruction), tmp_(tmp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(instruction_->IsInvokeStaticOrDirect())
        << "Unexpected instruction in read barrier arraycopy slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);

    const int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);

    Register src_curr_addr = XRegisterFrom(locations->GetTemp(0));
    Register dst_curr_addr = XRegisterFrom(locations->GetTemp(1));
    Register src_stop_addr = XRegisterFrom(locations->GetTemp(2));
    Register tmp_reg = WRegisterFrom(tmp_);

    __ Bind(GetEntryLabel());
    vixl::aarch64::Label slow_copy_loop;
    __ Bind(&slow_copy_loop);
    __ Ldr(tmp_reg, MemOperand(src_curr_addr, element_size, PostIndex));
    codegen->GetAssembler()->MaybeUnpoisonHeapReference(tmp_reg);
    // TODO: Inline the mark bit check before calling the runtime?
    // tmp_reg = ReadBarrier::Mark(tmp_reg);
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    // (See ReadBarrierMarkSlowPathARM64::EmitNativeCode for more
    // explanations.)
    DCHECK_NE(tmp_.reg(), LR);
    DCHECK_NE(tmp_.reg(), WSP);
    DCHECK_NE(tmp_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary (and not preserved). It thus cannot be used by
    // any live register in this slow path.
    DCHECK_NE(LocationFrom(src_curr_addr).reg(), IP0);
    DCHECK_NE(LocationFrom(dst_curr_addr).reg(), IP0);
    DCHECK_NE(LocationFrom(src_stop_addr).reg(), IP0);
    DCHECK_NE(tmp_.reg(), IP0);
    DCHECK(0 <= tmp_.reg() && tmp_.reg() < kNumberOfWRegisters) << tmp_.reg();
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(tmp_.reg());
    // This runtime call does not require a stack map.
    codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    codegen->GetAssembler()->MaybePoisonHeapReference(tmp_reg);
    __ Str(tmp_reg, MemOperand(dst_curr_addr, element_size, PostIndex));
    __ Cmp(src_curr_addr, src_stop_addr);
    __ B(&slow_copy_loop, ne);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierSystemArrayCopySlowPathARM64"; }

 private:
  Location tmp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathARM64);
};
#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
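  // Fmov between a core register and an FP register copies the raw bit
  // pattern unchanged, which is exactly the Double.doubleToRawLongBits /
  // Float.floatToRawIntBits contract.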
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
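      // Rev16 swaps the bytes within each 16-bit halfword; the Sxth below
      // then sign-extends the low halfword, since Java's short is signed.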
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

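// AArch64 has no count-trailing-zeros instruction, so the bit order is
// reversed (Rbit) and the leading zeros of the result are counted (Clz).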
static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

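  // A single Rbit reverses the bit order of the register, implementing
  // Integer.reverse / Long.reverse in one instruction.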
  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

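// There is no scalar popcount instruction on AArch64, so the value is moved
// to a NEON register, the bits of each byte are counted with Cnt, the byte
// counts are summed across the vector with Addv, and the result is moved
// back to a core register.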
static void GenBitCount(HInvoke* instr, Primitive::Type type, MacroAssembler* masm) {
  DCHECK(Primitive::IsIntOrLongType(type)) << type;
  DCHECK_EQ(instr->GetType(), Primitive::kPrimInt);
  DCHECK_EQ(Primitive::PrimitiveKind(instr->InputAt(0)->GetType()), type);

  UseScratchRegisterScope temps(masm);

  Register src = InputRegisterAt(instr, 0);
  Register dst = RegisterFrom(instr->GetLocations()->Out(), type);
  FPRegister fpr = (type == Primitive::kPrimLong) ? temps.AcquireD() : temps.AcquireS();

  __ Fmov(fpr, src);
  __ Cnt(fpr.V8B(), fpr.V8B());
  __ Addv(fpr.B(), fpr.V8B());
  __ Fmov(dst, fpr);
}

void IntrinsicLocationsBuilderARM64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimInt, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

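  // Branch-free abs: compare the input against zero, then conditionally
  // negate (Cneg) when it is negative.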
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(
      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

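  // A compare followed by a conditional select yields a branch-free integer
  // min/max.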
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusFPTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresFpuRegister());
}

static void GenMathRound(HInvoke* invoke, bool is_double, vixl::aarch64::MacroAssembler* masm) {
  // Java 8 API definition for Math.round():
  // Return the closest long or int to the argument, with ties rounding to positive infinity.
  //
  // There is no single instruction in ARMv8 that can support the above definition.
  // We choose to use FCVTAS here because it has the closest semantics.
  // FCVTAS performs rounding to nearest integer, ties away from zero.
  // For most inputs (positive values, zero, or NaN), this instruction is enough.
  // We only need a little extra handling after FCVTAS if the input is a negative half value.
  //
  // The reason we didn't choose the FCVTPS instruction here is that although it rounds
  // toward positive infinity, it doesn't round to nearest.
  // For example, FCVTPS(-1.9) = -1 and FCVTPS(1.1) = 2.
  // If we were using this instruction, for most inputs, more handling code would be needed.
  LocationSummary* l = invoke->GetLocations();
  FPRegister in_reg = is_double ? DRegisterFrom(l->InAt(0)) : SRegisterFrom(l->InAt(0));
  FPRegister tmp_fp = is_double ? DRegisterFrom(l->GetTemp(0)) : SRegisterFrom(l->GetTemp(0));
  Register out_reg = is_double ? XRegisterFrom(l->Out()) : WRegisterFrom(l->Out());
  vixl::aarch64::Label done;

  // Round to nearest integer, ties away from zero.
  __ Fcvtas(out_reg, in_reg);

  // For positive values, zero or NaN inputs, rounding is done.
  __ Tbz(out_reg, out_reg.GetSizeInBits() - 1, &done);

  // Handle input < 0 cases.
  // If input is negative but not a tie, previous result (round to nearest) is valid.
  // If input is a negative tie, out_reg += 1.
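  // For example, with input -1.5: Fcvtas above produced -2, and
  // Frinta(-1.5) = -2.0, so in - Frinta(in) = 0.5; the compare below sets
  // 'eq' and Cinc corrects the result to -1, matching Math.round(-1.5).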
  __ Frinta(tmp_fp, in_reg);
  __ Fsub(tmp_fp, in_reg, tmp_fp);
  __ Fcmp(tmp_fp, 0.5);
  __ Cinc(out_reg, out_reg, eq);

  __ Bind(&done);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusFPTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusFPTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<kArm64PointerSize>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  MacroAssembler* masm = codegen->GetVIXLAssembler();
  Location base_loc = locations->InAt(1);
  Register base = WRegisterFrom(base_loc);      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);  // Long offset.
  Location trg_loc = locations->Out();
  Register trg = RegisterFrom(trg_loc, type);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // UnsafeGetObject/UnsafeGetObjectVolatile with Baker's read barrier case.
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireW();
    codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                       trg_loc,
                                                       base,
                                                       /* offset */ 0u,
                                                       /* index */ offset_loc,
                                                       /* scale_factor */ 0u,
                                                       temp,
                                                       /* needs_null_check */ false,
                                                       is_volatile);
  } else {
    // Other cases.
    MemOperand mem_op(base.X(), offset);
    if (is_volatile) {
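      // A volatile get needs acquire semantics: LoadAcquire emits an
      // acquiring load (e.g. LDAR), so later memory accesses cannot be
      // reordered before it.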
      codegen->LoadAcquire(invoke, trg, mem_op, /* needs_null_check */ true);
    } else {
      codegen->Load(type, trg, mem_op);
    }

    if (type == Primitive::kPrimNot) {
      DCHECK(trg.IsW());
      codegen->MaybeGenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0u, offset_loc);
    }
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = codegen->GetVIXLAssembler();

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (is_volatile || is_ordered) {
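      // Volatile and ordered puts are both emitted as a store-release
      // (e.g. STLR); release semantics also cover the weaker ordered-store
      // contract.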
      codegen->StoreRelease(invoke, type, source, mem_op, /* needs_null_check */ false);
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke,
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke,
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke,
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke,
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke,
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke,
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke,
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke,
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke,
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      kUseBakerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output. Likewise when
  // emitting a (Baker) read barrier, which may call.
  Location::OutputOverlap overlaps =
      ((kPoisonHeapReferences && type == Primitive::kPrimNot) || can_call)
          ? Location::kOutputOverlap
          : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Temporary register for (Baker) read barrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

static void GenCas(HInvoke* invoke, Primitive::Type type, CodeGeneratorARM64* codegen) {
  MacroAssembler* masm = codegen->GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Location out_loc = locations->Out();
  Register out = WRegisterFrom(out_loc);                       // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);                 // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);

    // The only read barrier implementation supporting the
    // UnsafeCASObject intrinsic is the Baker-style read barriers.
    DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      Register temp = WRegisterFrom(locations->GetTemp(0));
      // Need to make sure the reference stored in the field is a to-space
      // one before attempting the CAS or the CAS could fail incorrectly.
      codegen->GenerateReferenceLoadWithBakerReadBarrier(
          invoke,
          out_loc,  // Unused, used only as a "temporary" within the read barrier.
          base,
          /* offset */ 0u,
          /* index */ offset_loc,
          /* scale_factor */ 0u,
          temp,
          /* needs_null_check */ false,
          /* use_load_acquire */ false,
          /* always_update_field */ true);
    }
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

Scott Wakeling97c72b72016-06-24 16:19:36 +01001187 vixl::aarch64::Label loop_head, exit_loop;
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001188 __ Bind(&loop_head);
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001189 __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
1190 __ Cmp(tmp_value, expected);
1191 __ B(&exit_loop, ne);
1192 __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
1193 __ Cbnz(tmp_32, &loop_head);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001194 __ Bind(&exit_loop);
1195 __ Cset(out, eq);
Roland Levillain4d027112015-07-01 15:41:14 +01001196
1197 if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
Roland Levillain4d027112015-07-01 15:41:14 +01001198 codegen->GetAssembler()->UnpoisonHeapReference(expected);
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001199 if (value.Is(expected)) {
1200 // Do not unpoison `value`, as it is the same register as
1201 // `expected`, which has just been unpoisoned.
1202 } else {
1203 codegen->GetAssembler()->UnpoisonHeapReference(value);
1204 }
Roland Levillain4d027112015-07-01 15:41:14 +01001205 }
Andreas Gampe878d58c2015-01-15 23:24:00 -08001206}
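
// A minimal C-style sketch of what the LDAXR/STLXR loop above computes,
// ignoring read barriers and heap poisoning (the exclusive-access helpers
// are pseudo-functions, not real APIs):
//
//   bool Cas(T* addr, T expected, T value) {
//     T observed;
//     do {
//       observed = load_exclusive_acquire(addr);        // Ldaxr
//       if (observed != expected) return false;         // Cmp + B.ne
//     } while (!store_exclusive_release(addr, value));  // Stlxr + Cbnz
//     return true;                                      // Cset eq
//   }
//
// The store-exclusive may fail spuriously (e.g. when the exclusive monitor
// is cleared), hence the retry loop.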

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke, Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke, Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  GenCas(invoke, Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            invoke->InputAt(1)->CanBeNull()
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  // Need an extra temporary register for the String compression feature.
  if (mirror::kUseStringCompression) {
    locations->AddTemp(Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = InputRegisterAt(invoke, 0);
  Register arg = InputRegisterAt(invoke, 1);
  DCHECK(str.IsW());
  DCHECK(arg.IsW());
  Register out = OutputRegister(invoke);

  Register temp0 = WRegisterFrom(locations->GetTemp(0));
  Register temp1 = WRegisterFrom(locations->GetTemp(1));
  Register temp2 = WRegisterFrom(locations->GetTemp(2));
  Register temp3;
  if (mirror::kUseStringCompression) {
    temp3 = WRegisterFrom(locations->GetTemp(3));
  }

  vixl::aarch64::Label loop;
  vixl::aarch64::Label find_char_diff;
  vixl::aarch64::Label end;
  vixl::aarch64::Label different_compression;

  // Get offsets of count and value fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Take slow path and throw if input can be and is null.
  SlowPathCodeARM64* slow_path = nullptr;
  const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
  if (can_slow_path) {
    slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
    codegen_->AddSlowPath(slow_path);
    __ Cbz(arg, slow_path->GetEntryLabel());
  }

  // Reference equality check, return 0 if same reference.
  __ Subs(out, str, arg);
  __ B(&end, eq);

  if (mirror::kUseStringCompression) {
    // Load `count` fields of this and argument strings.
    __ Ldr(temp3, HeapOperand(str, count_offset));
    __ Ldr(temp2, HeapOperand(arg, count_offset));
    // Clean out compression flag from lengths.
    __ Lsr(temp0, temp3, 1u);
    __ Lsr(temp1, temp2, 1u);
  } else {
    // Load lengths of this and argument strings.
    __ Ldr(temp0, HeapOperand(str, count_offset));
    __ Ldr(temp1, HeapOperand(arg, count_offset));
  }
  // out = length diff.
  __ Subs(out, temp0, temp1);
  // temp0 = min(len(str), len(arg)).
  __ Csel(temp0, temp1, temp0, ge);
  // Shorter string is empty?
  __ Cbz(temp0, &end);

  if (mirror::kUseStringCompression) {
    // Check if both strings use the same compression style; if so, this comparison loop applies.
    __ Eor(temp2, temp2, Operand(temp3));
    // Interleave with compression flag extraction, which is needed for both paths,
    // and also set the flags, which are needed only for the different-compression path.
    __ Ands(temp3.W(), temp3.W(), Operand(1));
    __ Tbnz(temp2, 0, &different_compression);  // Does not use flags.
  }
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  if (mirror::kUseStringCompression) {
    // For string compression, calculate the number of bytes to compare (not chars).
    // This could in theory exceed INT32_MAX, so treat temp0 as unsigned.
    __ Lsl(temp0, temp0, temp3);
  }

  UseScratchRegisterScope scratch_scope(masm);
  Register temp4 = scratch_scope.AcquireX();

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Promote temp2 to an X reg, ready for LDR.
  temp2 = temp2.X();

  // Loop to compare 4x16-bit characters at a time (ok because of string data alignment).
  __ Bind(&loop);
  __ Ldr(temp4, MemOperand(str.X(), temp1.X()));
  __ Ldr(temp2, MemOperand(arg.X(), temp1.X()));
  __ Cmp(temp4, temp2);
  __ B(ne, &find_char_diff);
  __ Add(temp1, temp1, char_size * 4);
  // With string compression, we have compared 8 bytes, otherwise 4 chars.
  __ Subs(temp0, temp0, (mirror::kUseStringCompression) ? 8 : 4);
  __ B(&loop, hi);
  __ B(&end);

  // Promote temp1 to an X reg, ready for EOR.
  temp1 = temp1.X();

  // Find the single character difference.
  __ Bind(&find_char_diff);
  // Get the bit position of the first character that differs.
  __ Eor(temp1, temp2, temp4);
  __ Rbit(temp1, temp1);
  __ Clz(temp1, temp1);

  // If the number of chars remaining <= the index where the difference occurs (0-3), then
  // the difference occurs outside the remaining string data, so just return length diff (out).
  // Unlike ARM, we're doing the comparison in one go here, without the subtraction at the
  // find_char_diff_2nd_cmp path, so it doesn't matter whether the comparison is signed or
  // unsigned when string compression is disabled.
  // When it's enabled, the comparison must be unsigned.
  __ Cmp(temp0, Operand(temp1.W(), LSR, (mirror::kUseStringCompression) ? 3 : 4));
  __ B(ls, &end);

  // Extract the characters and calculate the difference.
  if (mirror::kUseStringCompression) {
    __ Bic(temp1, temp1, 0x7);
    __ Bic(temp1, temp1, Operand(temp3.X(), LSL, 3u));
  } else {
    __ Bic(temp1, temp1, 0xf);
  }
  __ Lsr(temp2, temp2, temp1);
  __ Lsr(temp4, temp4, temp1);
  if (mirror::kUseStringCompression) {
    // Prioritize the case of compressed strings and calculate such result first.
    __ Uxtb(temp1, temp4);
    __ Sub(out, temp1.W(), Operand(temp2.W(), UXTB));
    __ Tbz(temp3, 0u, &end);  // If actually compressed, we're done.
  }
  __ Uxth(temp4, temp4);
  __ Sub(out, temp4.W(), Operand(temp2.W(), UXTH));

  if (mirror::kUseStringCompression) {
    __ B(&end);
    __ Bind(&different_compression);

    // Comparison for different compression style.
    const size_t c_char_size = Primitive::ComponentSize(Primitive::kPrimByte);
    DCHECK_EQ(c_char_size, 1u);
    temp1 = temp1.W();
    temp2 = temp2.W();
    temp4 = temp4.W();

    // `temp1` will hold the compressed data pointer, `temp2` the uncompressed data pointer.
    // Note that flags have been set by the `str` compression flag extraction to `temp3`
    // before branching to the `different_compression` label.
    __ Csel(temp1, str, arg, eq);   // Pointer to the compressed string.
    __ Csel(temp2, str, arg, ne);   // Pointer to the uncompressed string.

    // We want to free up the temp3, currently holding `str` compression flag, for comparison.
    // So, we move it to the bottom bit of the iteration count `temp0` which we then need to treat
    // as unsigned. Start by freeing the bit with a LSL and continue further down by a SUB which
    // will allow `subs temp0, #2; bhi different_compression_loop` to serve as the loop condition.
    __ Lsl(temp0, temp0, 1u);

    // Adjust temp1 and temp2 from string pointers to data pointers.
    __ Add(temp1, temp1, Operand(value_offset));
    __ Add(temp2, temp2, Operand(value_offset));

    // Complete the move of the compression flag.
    __ Sub(temp0, temp0, Operand(temp3));

    vixl::aarch64::Label different_compression_loop;
    vixl::aarch64::Label different_compression_diff;

    __ Bind(&different_compression_loop);
    __ Ldrb(temp4, MemOperand(temp1.X(), c_char_size, PostIndex));
    __ Ldrh(temp3, MemOperand(temp2.X(), char_size, PostIndex));
    __ Subs(temp4, temp4, Operand(temp3));
    __ B(&different_compression_diff, ne);
    __ Subs(temp0, temp0, 2);
    __ B(&different_compression_loop, hi);
    __ B(&end);

    // Calculate the difference.
    __ Bind(&different_compression_diff);
    __ Tst(temp0, Operand(1));
    static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                  "Expecting 0=compressed, 1=uncompressed");
    __ Cneg(out, temp4, ne);
  }

  __ Bind(&end);

  if (can_slow_path) {
    __ Bind(slow_path->GetExitLabel());
  }
}
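
// The flag handling above relies on the compressed `count` field layout
// (see the static_assert): bit 0 is 0 for a compressed (8-bit) string and
// 1 for an uncompressed (16-bit) one, with the length in the upper bits,
// i.e. roughly:
//
//   count = (length << 1) | (is_compressed ? 0 : 1)
//
// so the `Lsr(..., 1u)` pair extracts the lengths and bit 0 selects between
// the 8-bit and 16-bit comparison paths.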

// The cutoff for unrolling the loop in the String.equals() intrinsic for const strings.
// The normal loop plus the pre-header is 9 instructions without string compression and 12
// instructions with string compression. We can compare up to 8 bytes in 4 instructions
// (LDR+LDR+CMP+BNE) and up to 16 bytes in 5 instructions (LDP+LDP+CMP+CCMP+BNE). Allow up
// to 10 instructions for the unrolled loop.
constexpr size_t kShortConstStringEqualsCutoffInBytes = 32;
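
// For example, a maximal 31-byte compressed constant rounds up to 32 bytes
// and unrolls into two 16-byte LDP+LDP+CMP+CCMP+BNE steps, i.e. 2 * 5 = 10
// instructions, exactly the budget described above.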

// Returns the dex-file UTF-8 data and UTF-16 length for `candidate` if it is
// a constant string load, or nullptr otherwise.
static const char* GetConstString(HInstruction* candidate, uint32_t* utf16_length) {
  if (candidate->IsLoadString()) {
    HLoadString* load_string = candidate->AsLoadString();
    const DexFile& dex_file = load_string->GetDexFile();
    return dex_file.StringDataAndUtf16LengthByIdx(load_string->GetStringIndex(), utf16_length);
  }
  return nullptr;
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());

  // For the generic implementation and for long const strings we need a temporary.
  // We do not need it for short const strings, up to 8 bytes, see code generation below.
  uint32_t const_string_length = 0u;
  const char* const_string = GetConstString(invoke->InputAt(0), &const_string_length);
  if (const_string == nullptr) {
    const_string = GetConstString(invoke->InputAt(1), &const_string_length);
  }
  bool is_compressed =
      mirror::kUseStringCompression &&
      const_string != nullptr &&
      mirror::String::DexFileStringAllASCII(const_string, const_string_length);
  if (const_string == nullptr || const_string_length > (is_compressed ? 8u : 4u)) {
    locations->AddTemp(Location::RequiresRegister());
  }

  // TODO: If the String.equals() is used only for an immediately following HIf, we can
  // mark it as emitted-at-use-site and emit branches directly to the appropriate blocks.
  // Then we shall need an extra temporary register instead of the output register.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = scratch_scope.AcquireW();

  vixl::aarch64::Label loop;
  vixl::aarch64::Label end;
  vixl::aarch64::Label return_true;
  vixl::aarch64::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Cbz(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Ldr(temp, MemOperand(str.X(), class_offset));
    __ Ldr(temp1, MemOperand(arg.X(), class_offset));
    __ Cmp(temp, temp1);
    __ B(&return_false, ne);
  }

  // Check if one of the inputs is a const string. Do not special-case both strings
  // being const, such cases should be handled by constant folding if needed.
  uint32_t const_string_length = 0u;
  const char* const_string = GetConstString(invoke->InputAt(0), &const_string_length);
  if (const_string == nullptr) {
    const_string = GetConstString(invoke->InputAt(1), &const_string_length);
    if (const_string != nullptr) {
      std::swap(str, arg);  // Make sure the const string is in `str`.
    }
  }
  bool is_compressed =
      mirror::kUseStringCompression &&
      const_string != nullptr &&
      mirror::String::DexFileStringAllASCII(const_string, const_string_length);

  if (const_string != nullptr) {
    // Load `count` field of the argument string and check if it matches the const string.
    // This also compares the compression style; if it differs, return false.
    __ Ldr(temp, MemOperand(arg.X(), count_offset));
    __ Cmp(temp, Operand(mirror::String::GetFlaggedCount(const_string_length, is_compressed)));
    __ B(&return_false, ne);
  } else {
    // Load `count` fields of this and argument strings.
    __ Ldr(temp, MemOperand(str.X(), count_offset));
    __ Ldr(temp1, MemOperand(arg.X(), count_offset));
    // Check if `count` fields are equal, return false if they're not.
    // This also compares the compression style; if it differs, return false.
    __ Cmp(temp, temp1);
    __ B(&return_false, ne);
  }

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  if (const_string != nullptr &&
      const_string_length < (is_compressed ? kShortConstStringEqualsCutoffInBytes
                                           : kShortConstStringEqualsCutoffInBytes / 2u)) {
    // Load and compare the contents. Though we know the contents of the short const string
    // at compile time, materializing constants may be more code than loading from memory.
    int32_t offset = value_offset;
    size_t remaining_bytes =
        RoundUp(is_compressed ? const_string_length : const_string_length * 2u, 8u);
    temp = temp.X();
    temp1 = temp1.X();
    while (remaining_bytes > 8u) {
      Register temp2 = XRegisterFrom(locations->GetTemp(0));
      __ Ldp(temp, temp1, MemOperand(str.X(), offset));
      __ Ldp(temp2, out, MemOperand(arg.X(), offset));
      __ Cmp(temp, temp2);
      __ Ccmp(temp1, out, NoFlag, eq);
      __ B(&return_false, ne);
      offset += 2u * sizeof(uint64_t);
      remaining_bytes -= 2u * sizeof(uint64_t);
    }
    if (remaining_bytes != 0u) {
      __ Ldr(temp, MemOperand(str.X(), offset));
      __ Ldr(temp1, MemOperand(arg.X(), offset));
      __ Cmp(temp, temp1);
      __ B(&return_false, ne);
    }
  } else {
    // Return true if both strings are empty. Even with string compression `count == 0` means empty.
    static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                  "Expecting 0=compressed, 1=uncompressed");
    __ Cbz(temp, &return_true);

    if (mirror::kUseStringCompression) {
      // For string compression, calculate the number of bytes to compare (not chars).
      // This could in theory exceed INT32_MAX, so treat temp as unsigned.
      __ And(temp1, temp, Operand(1));  // Extract compression flag.
      __ Lsr(temp, temp, 1u);           // Extract length.
      __ Lsl(temp, temp, temp1);        // Calculate number of bytes to compare.
    }

    // Store offset of string value in preparation for comparison loop.
    __ Mov(temp1, value_offset);

    temp1 = temp1.X();
    Register temp2 = XRegisterFrom(locations->GetTemp(0));
    // Loop to compare strings 8 bytes at a time starting at the front of the string.
    // Ok to do this because strings are zero-padded to kObjectAlignment.
    __ Bind(&loop);
    __ Ldr(out, MemOperand(str.X(), temp1));
    __ Ldr(temp2, MemOperand(arg.X(), temp1));
    __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
    __ Cmp(out, temp2);
    __ B(&return_false, ne);
    // With string compression, we have compared 8 bytes, otherwise 4 chars.
    __ Sub(temp, temp, Operand(mirror::kUseStringCompression ? 8 : 4), SetFlags);
    __ B(&loop, hi);
  }

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}
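
// To illustrate the const-string fast path above: with string compression
// enabled, comparing against a constant ASCII string of length 5 (e.g.
// "hello") checks the argument's `count` against GetFlaggedCount(5, true)
// and then, since RoundUp(5, 8) == 8, compares the padded contents with a
// single LDR+LDR+CMP+B.ne sequence; neither the loop nor the extra temp
// register is needed.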

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
  SlowPathCodeARM64* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Tst(char_reg, 0xFFFF0000);
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(ne, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    Register tmp_reg = WRegisterFrom(locations->GetTemp(0));
    __ Mov(tmp_reg, 0);
  }

  codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
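
// The code-point checks above exist because the IndexOf entrypoint searches
// for a single UTF-16 unit. An illustrative caller that must take the slow
// path (a supplementary code point, > 0xFFFF):
//
//   "a\uD83D\uDE00b".indexOf(0x1F600)
//
// whereas a plain `char` argument can never exceed 0xFFFF, so its runtime
// check is omitted entirely.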

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainOnly,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Bind(slow_path->GetExitLabel());
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCallOnMainOnly,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(1)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCallOnMainOnly,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void GenFPToFPCall(HInvoke* invoke,
                          CodeGeneratorARM64* codegen,
                          QuickEntrypointEnum entry) {
  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
}
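
// The visitors below all follow the same shape: set up the FP runtime
// calling convention with the Create*CallLocations() helpers above, then
// dispatch to the matching quick entrypoint (kQuickCos, kQuickSinh, ...),
// which is assumed to wrap the corresponding libm routine.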

void IntrinsicLocationsBuilderARM64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = XRegisterFrom(locations->InAt(0));
  Register srcBegin = XRegisterFrom(locations->InAt(1));
  Register srcEnd = XRegisterFrom(locations->InAt(2));
  Register dstObj = XRegisterFrom(locations->InAt(3));
  Register dstBegin = XRegisterFrom(locations->InAt(4));

  Register src_ptr = XRegisterFrom(locations->GetTemp(0));
  Register num_chr = XRegisterFrom(locations->GetTemp(1));
  Register tmp1 = XRegisterFrom(locations->GetTemp(2));

  UseScratchRegisterScope temps(masm);
  Register dst_ptr = temps.AcquireX();
  Register tmp2 = temps.AcquireX();

  vixl::aarch64::Label done;
  vixl::aarch64::Label compressed_string_loop;
  __ Sub(num_chr, srcEnd, srcBegin);
  // Early out for valid zero-length retrievals.
  __ Cbz(num_chr, &done);

  // dst address to start copying to.
  __ Add(dst_ptr, dstObj, Operand(data_offset));
  __ Add(dst_ptr, dst_ptr, Operand(dstBegin, LSL, 1));

  // src address to copy from.
  __ Add(src_ptr, srcObj, Operand(value_offset));
  vixl::aarch64::Label compressed_string_preloop;
  if (mirror::kUseStringCompression) {
    // Location of count in string.
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // String's length.
    __ Ldr(tmp2, MemOperand(srcObj, count_offset));
    __ Tbz(tmp2, 0, &compressed_string_preloop);
  }
  __ Add(src_ptr, src_ptr, Operand(srcBegin, LSL, 1));

  // Do the copy.
  vixl::aarch64::Label loop;
  vixl::aarch64::Label remainder;

  // Save repairing the value of num_chr on the < 8 character path.
  __ Subs(tmp1, num_chr, 8);
  __ B(lt, &remainder);

  // Keep the result of the earlier subs, we are going to fetch at least 8 characters.
  __ Mov(num_chr, tmp1);

  // Main loop used for longer fetches loads and stores 8x16-bit characters at a time.
  // (Unaligned addresses are acceptable here and not worth inlining extra code to rectify.)
  __ Bind(&loop);
  __ Ldp(tmp1, tmp2, MemOperand(src_ptr, char_size * 8, PostIndex));
  __ Subs(num_chr, num_chr, 8);
  __ Stp(tmp1, tmp2, MemOperand(dst_ptr, char_size * 8, PostIndex));
  __ B(ge, &loop);

  __ Adds(num_chr, num_chr, 8);
  __ B(eq, &done);

  // Main loop for < 8 character case and remainder handling. Loads and stores one
  // 16-bit Java character at a time.
  __ Bind(&remainder);
  __ Ldrh(tmp1, MemOperand(src_ptr, char_size, PostIndex));
  __ Subs(num_chr, num_chr, 1);
  __ Strh(tmp1, MemOperand(dst_ptr, char_size, PostIndex));
  __ B(gt, &remainder);
  __ B(&done);

  if (mirror::kUseStringCompression) {
    const size_t c_char_size = Primitive::ComponentSize(Primitive::kPrimByte);
    DCHECK_EQ(c_char_size, 1u);
    __ Bind(&compressed_string_preloop);
    __ Add(src_ptr, src_ptr, Operand(srcBegin));
    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    __ Bind(&compressed_string_loop);
    __ Ldrb(tmp1, MemOperand(src_ptr, c_char_size, PostIndex));
    __ Strh(tmp1, MemOperand(dst_ptr, char_size, PostIndex));
    __ Subs(num_chr, num_chr, Operand(1));
    __ B(gt, &compressed_string_loop);
  }

  __ Bind(&done);
}
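
// To illustrate the copy strategy above for an uncompressed source: with
// srcEnd - srcBegin == 13, the main loop runs one LDP/STP iteration (8
// chars, 16 bytes), `num_chr` ends up at 5 after the final Adds, and the
// remainder loop then moves the last five chars one LDRH/STRH at a time.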

// Mirrors ARRAYCOPY_SHORT_CHAR_ARRAY_THRESHOLD in libcore, so we can choose to use the native
// implementation there for longer copy lengths.
static constexpr int32_t kSystemArrayCopyCharThreshold = 32;

static void SetSystemArrayCopyLocationRequires(LocationSummary* locations,
                                               uint32_t at,
                                               HInstruction* input) {
  HIntConstant* const_input = input->AsIntConstant();
  if (const_input != nullptr && !vixl::aarch64::Assembler::IsImmAddSub(const_input->GetValue())) {
    locations->SetInAt(at, Location::RequiresRegister());
  } else {
    locations->SetInAt(at, Location::RegisterOrConstant(input));
  }
}
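
// IsImmAddSub() accepts values encodable as an AArch64 add/sub immediate
// (a 12-bit value, optionally shifted left by 12). Illustratively, a
// constant position of 4095 may stay a constant operand, while 4097 is not
// encodable and is forced into a register by the check above.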

void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // Check to see if we have known failures that will cause us to have to bail out
  // to the runtime, and just generate the runtime call directly.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dst_pos = invoke->InputAt(3)->AsIntConstant();

  // The positions must be non-negative.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dst_pos != nullptr && dst_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // The length must be >= 0 and not so long that we would (currently) prefer libcore's
  // native implementation.
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0 || len > kSystemArrayCopyCharThreshold) {
      // Just call as normal.
      return;
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena();
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // arraycopy(char[] src, int src_pos, char[] dst, int dst_pos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1));
  locations->SetInAt(2, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3));
  SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

static void CheckSystemArrayCopyPosition(MacroAssembler* masm,
                                         const Location& pos,
                                         const Register& input,
                                         const Location& length,
                                         SlowPathCodeARM64* slow_path,
                                         const Register& temp,
                                         bool length_is_input_length = false) {
  const int32_t length_offset = mirror::Array::LengthOffset().Int32Value();
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ Ldr(temp, MemOperand(input, length_offset));
        __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
        __ B(slow_path->GetEntryLabel(), lt);
      }
    } else {
      // Check that length(input) >= pos.
      __ Ldr(temp, MemOperand(input, length_offset));
      __ Subs(temp, temp, pos_const);
      __ B(slow_path->GetEntryLabel(), lt);

      // Check that (length(input) - pos) >= length.
      __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
      __ B(slow_path->GetEntryLabel(), lt);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    __ Cbnz(WRegisterFrom(pos), slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = WRegisterFrom(pos);
    __ Tbnz(pos_reg, pos_reg.GetSizeInBits() - 1, slow_path->GetEntryLabel());

    // Check that pos <= length(input) && (length(input) - pos) >= length.
    __ Ldr(temp, MemOperand(input, length_offset));
    __ Subs(temp, temp, pos_reg);
    // Ccmp if length(input) >= pos, else definitely bail to slow path (N!=V == lt).
    __ Ccmp(temp, OperandFrom(length, Primitive::kPrimInt), NFlag, ge);
    __ B(slow_path->GetEntryLabel(), lt);
  }
}
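
// A sketch of the general (non-constant `pos`) path above, in Java-like pseudocode:
//
//   if (pos < 0 || pos > input.length || input.length - pos < length) {
//     goto slow_path;  // Delegate to the runtime call.
//   }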

// Compute base source address, base destination address, and end source address
// for System.arraycopy* intrinsics.
static void GenSystemArrayCopyAddresses(MacroAssembler* masm,
                                        Primitive::Type type,
                                        const Register& src,
                                        const Location& src_pos,
                                        const Register& dst,
                                        const Location& dst_pos,
                                        const Location& copy_length,
                                        const Register& src_base,
                                        const Register& dst_base,
                                        const Register& src_end) {
  DCHECK(type == Primitive::kPrimNot || type == Primitive::kPrimChar)
      << "Unexpected element type: " << type;
  const int32_t element_size = Primitive::ComponentSize(type);
  const int32_t element_size_shift = Primitive::ComponentSizeShift(type);

  uint32_t data_offset = mirror::Array::DataOffset(element_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_base, src, element_size * constant + data_offset);
  } else {
    __ Add(src_base, src, data_offset);
    __ Add(src_base, src_base, Operand(XRegisterFrom(src_pos), LSL, element_size_shift));
  }

  if (dst_pos.IsConstant()) {
    int32_t constant = dst_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(dst_base, dst, element_size * constant + data_offset);
  } else {
    __ Add(dst_base, dst, data_offset);
    __ Add(dst_base, dst_base, Operand(XRegisterFrom(dst_pos), LSL, element_size_shift));
  }

  if (copy_length.IsConstant()) {
    int32_t constant = copy_length.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_end, src_base, element_size * constant);
  } else {
    __ Add(src_end, src_base, Operand(XRegisterFrom(copy_length), LSL, element_size_shift));
  }
}
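
// Worked example of the address computation above (illustrative operand values): for a
// char[] copy, element_size == 2 and the payload starts at data_offset, so with a constant
// src_pos == 3 and length == 5 the code computes
//
//   src_base = src + data_offset + 2 * 3
//   src_end  = src_base + 2 * 5
//
// and dst_base is formed the same way from dst and dst_pos.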

void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();
  Register src = XRegisterFrom(locations->InAt(0));
  Location src_pos = locations->InAt(1);
  Register dst = XRegisterFrom(locations->InAt(2));
  Location dst_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  // If source and destination are the same, take the slow path. Overlapping copy regions
  // must be copied in reverse, and we can't know in all cases whether that is needed.
  __ Cmp(src, dst);
  __ B(slow_path->GetEntryLabel(), eq);

  // Bail out if the source is null.
  __ Cbz(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Cbz(dst, slow_path->GetEntryLabel());

  if (!length.IsConstant()) {
    // Merge the following two comparisons into one:
    //   If the length is negative, bail out (delegate to libcore's native implementation).
    //   If the length is > 32, then (currently) prefer libcore's native implementation.
    __ Cmp(WRegisterFrom(length), kSystemArrayCopyCharThreshold);
    __ B(slow_path->GetEntryLabel(), hi);
  } else {
    // We have already checked in the LocationsBuilder for the constant case.
    DCHECK_GE(length.GetConstant()->AsIntConstant()->GetValue(), 0);
    DCHECK_LE(length.GetConstant()->AsIntConstant()->GetValue(), 32);
  }

  Register src_curr_addr = WRegisterFrom(locations->GetTemp(0));
  Register dst_curr_addr = WRegisterFrom(locations->GetTemp(1));
  Register src_stop_addr = WRegisterFrom(locations->GetTemp(2));

  CheckSystemArrayCopyPosition(masm,
                               src_pos,
                               src,
                               length,
                               slow_path,
                               src_curr_addr,
                               false);

  CheckSystemArrayCopyPosition(masm,
                               dst_pos,
                               dst,
                               length,
                               slow_path,
                               src_curr_addr,
                               false);

  src_curr_addr = src_curr_addr.X();
  dst_curr_addr = dst_curr_addr.X();
  src_stop_addr = src_stop_addr.X();

  GenSystemArrayCopyAddresses(masm,
                              Primitive::kPrimChar,
                              src,
                              src_pos,
                              dst,
                              dst_pos,
                              length,
                              src_curr_addr,
                              dst_curr_addr,
                              src_stop_addr);

  // Iterate over the arrays and do a raw copy of the chars.
  const int32_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  UseScratchRegisterScope temps(masm);
  Register tmp = temps.AcquireW();
  vixl::aarch64::Label loop, done;
  __ Bind(&loop);
  __ Cmp(src_curr_addr, src_stop_addr);
  __ B(&done, eq);
  __ Ldrh(tmp, MemOperand(src_curr_addr, char_size, PostIndex));
  __ Strh(tmp, MemOperand(dst_curr_addr, char_size, PostIndex));
  __ B(&loop);
  __ Bind(&done);

  __ Bind(slow_path->GetExitLabel());
}
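
// Roughly, the copy loop emitted above corresponds to the following pseudo-assembly
// (register names are placeholders):
//
//   loop:
//     cmp  src_curr_addr, src_stop_addr
//     b.eq done
//     ldrh w_tmp, [src_curr_addr], #2  // Post-index: advance one char per iteration.
//     strh w_tmp, [dst_curr_addr], #2
//     b    loop
//   done: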

// We can choose to use libcore's native implementation for longer copy lengths.
static constexpr int32_t kSystemArrayCopyThreshold = 128;

// CodeGenerator::CreateSystemArrayCopyLocationSummary uses three temporary registers.
// We want to use two temporary registers in order to reduce register pressure on arm64.
// So we don't use CodeGenerator::CreateSystemArrayCopyLocationSummary.
void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style read barrier.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  // Check to see if we have known failures that will force us to bail out to the
  // runtime; if so, just generate the runtime call directly.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();

  // The positions must be non-negative.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // The length must be >= 0.
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0 || len >= kSystemArrayCopyThreshold) {
      // Just call as normal.
      return;
    }
  }

  SystemArrayCopyOptimizations optimizations(invoke);

  if (optimizations.GetDestinationIsSource()) {
    if (src_pos != nullptr && dest_pos != nullptr && src_pos->GetValue() < dest_pos->GetValue()) {
      // We only support backward copying if source and destination are the same.
      return;
    }
  }

  if (optimizations.GetDestinationIsPrimitiveArray() || optimizations.GetSourceIsPrimitiveArray()) {
    // We currently don't intrinsify primitive copying.
    return;
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena();
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // arraycopy(Object src, int src_pos, Object dest, int dest_pos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1));
  locations->SetInAt(2, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3));
  SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Temporary register IP0, obtained from the VIXL scratch register
    // pool, cannot be used in ReadBarrierSystemArrayCopySlowPathARM64
    // (because that register is clobbered by ReadBarrierMarkRegX
    // entry points). Get an extra temporary register from the
    // register allocator.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style read barrier.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  Register src = XRegisterFrom(locations->InAt(0));
  Location src_pos = locations->InAt(1);
  Register dest = XRegisterFrom(locations->InAt(2));
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Location temp1_loc = LocationFrom(temp1);
  Register temp2 = WRegisterFrom(locations->GetTemp(1));
  Location temp2_loc = LocationFrom(temp2);

  SlowPathCodeARM64* intrinsic_slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(intrinsic_slow_path);

  vixl::aarch64::Label conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  // If source and destination are the same, we go to the slow path if we need to do
  // forward copying.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      if (optimizations.GetDestinationIsSource()) {
        // Checked when building locations.
        DCHECK_GE(src_pos_constant, dest_pos_constant);
      } else if (src_pos_constant < dest_pos_constant) {
        __ Cmp(src, dest);
        __ B(intrinsic_slow_path->GetEntryLabel(), eq);
      }
      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ Cmp(src, dest);
        __ B(&conditions_on_positions_validated, ne);
      }
      __ Cmp(WRegisterFrom(dest_pos), src_pos_constant);
      __ B(intrinsic_slow_path->GetEntryLabel(), gt);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ Cmp(src, dest);
      __ B(&conditions_on_positions_validated, ne);
    }
    __ Cmp(RegisterFrom(src_pos, invoke->InputAt(1)->GetType()),
           OperandFrom(dest_pos, invoke->InputAt(3)->GetType()));
    __ B(intrinsic_slow_path->GetEntryLabel(), lt);
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ Cbz(src, intrinsic_slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ Cbz(dest, intrinsic_slow_path->GetEntryLabel());
  }

  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    // Merge the following two comparisons into one:
    //   If the length is negative, bail out (delegate to libcore's native implementation).
    //   If the length is >= 128, then (currently) prefer the native implementation.
    __ Cmp(WRegisterFrom(length), kSystemArrayCopyThreshold);
    __ B(intrinsic_slow_path->GetEntryLabel(), hs);
  }
  // Validity checks: source.
  CheckSystemArrayCopyPosition(masm,
                               src_pos,
                               src,
                               length,
                               intrinsic_slow_path,
                               temp1,
                               optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckSystemArrayCopyPosition(masm,
                               dest_pos,
                               dest,
                               length,
                               intrinsic_slow_path,
                               temp1,
                               optimizations.GetCountIsDestinationLength());
  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);
    // Note: Because it is acquired from VIXL's scratch register pool,
    // `temp3` might be IP0, and thus cannot be used as `ref` argument
    // of CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier
    // calls below (see ReadBarrierMarkSlowPathARM64 for more details).
    Register temp3 = temps.AcquireW();

    if (!optimizations.GetDoesNotNeedTypeCheck()) {
      // Check whether all elements of the source array are assignable to the component
      // type of the destination array. We do two checks: the classes are the same,
      // or the destination is Object[]. If none of these checks succeed, we go to the
      // slow path.

      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        if (!optimizations.GetSourceIsNonPrimitiveArray()) {
          // /* HeapReference<Class> */ temp1 = src->klass_
          codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke,
                                                          temp1_loc,
                                                          src.W(),
                                                          class_offset,
                                                          temp2,
                                                          /* needs_null_check */ false,
                                                          /* use_load_acquire */ false);
          // Bail out if the source is not a non-primitive array.
          // /* HeapReference<Class> */ temp1 = temp1->component_type_
          codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke,
                                                          temp1_loc,
                                                          temp1,
                                                          component_offset,
                                                          temp2,
                                                          /* needs_null_check */ false,
                                                          /* use_load_acquire */ false);
          __ Cbz(temp1, intrinsic_slow_path->GetEntryLabel());
          // If heap poisoning is enabled, `temp1` has been unpoisoned
          // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
          // /* uint16_t */ temp1 = static_cast<uint16>(temp1->primitive_type_);
          __ Ldrh(temp1, HeapOperand(temp1, primitive_offset));
          static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
          __ Cbnz(temp1, intrinsic_slow_path->GetEntryLabel());
        }

        // /* HeapReference<Class> */ temp1 = dest->klass_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke,
                                                        temp1_loc,
                                                        dest.W(),
                                                        class_offset,
                                                        temp2,
                                                        /* needs_null_check */ false,
                                                        /* use_load_acquire */ false);

        if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
          // Bail out if the destination is not a non-primitive array.
          //
          // Register `temp1` is not trashed by the read barrier emitted
          // by GenerateFieldLoadWithBakerReadBarrier below, as that
          // method produces a call to a ReadBarrierMarkRegX entry point,
          // which saves all potentially live registers, including
          // temporaries such as `temp1`.
          // /* HeapReference<Class> */ temp2 = temp1->component_type_
          codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke,
                                                          temp2_loc,
                                                          temp1,
                                                          component_offset,
                                                          temp3,
                                                          /* needs_null_check */ false,
                                                          /* use_load_acquire */ false);
          __ Cbz(temp2, intrinsic_slow_path->GetEntryLabel());
          // If heap poisoning is enabled, `temp2` has been unpoisoned
          // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
          // /* uint16_t */ temp2 = static_cast<uint16>(temp2->primitive_type_);
          __ Ldrh(temp2, HeapOperand(temp2, primitive_offset));
          static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
          __ Cbnz(temp2, intrinsic_slow_path->GetEntryLabel());
        }

        // For the same reason given earlier, `temp1` is not trashed by the
        // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below.
        // /* HeapReference<Class> */ temp2 = src->klass_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke,
                                                        temp2_loc,
                                                        src.W(),
                                                        class_offset,
                                                        temp3,
                                                        /* needs_null_check */ false,
                                                        /* use_load_acquire */ false);
        // Note: if heap poisoning is on, we are comparing two unpoisoned references here.
        __ Cmp(temp1, temp2);

        if (optimizations.GetDestinationIsTypedObjectArray()) {
          vixl::aarch64::Label do_copy;
          __ B(&do_copy, eq);
          // /* HeapReference<Class> */ temp1 = temp1->component_type_
          codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke,
                                                          temp1_loc,
                                                          temp1,
                                                          component_offset,
                                                          temp2,
                                                          /* needs_null_check */ false,
                                                          /* use_load_acquire */ false);
          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          // We do not need to emit a read barrier for the following
          // heap reference load, as `temp1` is only used in a
          // comparison with null below, and this reference is not
          // kept afterwards.
          __ Ldr(temp1, HeapOperand(temp1, super_offset));
          __ Cbnz(temp1, intrinsic_slow_path->GetEntryLabel());
          __ Bind(&do_copy);
        } else {
          __ B(intrinsic_slow_path->GetEntryLabel(), ne);
        }
      } else {
        // Non read barrier code.

        // /* HeapReference<Class> */ temp1 = dest->klass_
        __ Ldr(temp1, MemOperand(dest, class_offset));
        // /* HeapReference<Class> */ temp2 = src->klass_
        __ Ldr(temp2, MemOperand(src, class_offset));
        bool did_unpoison = false;
        if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
            !optimizations.GetSourceIsNonPrimitiveArray()) {
          // One or two of the references need to be unpoisoned. Unpoison them
          // both to make the identity check valid.
          codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
          codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp2);
          did_unpoison = true;
        }

        if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
          // Bail out if the destination is not a non-primitive array.
          // /* HeapReference<Class> */ temp3 = temp1->component_type_
          __ Ldr(temp3, HeapOperand(temp1, component_offset));
          __ Cbz(temp3, intrinsic_slow_path->GetEntryLabel());
          codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
          // /* uint16_t */ temp3 = static_cast<uint16>(temp3->primitive_type_);
          __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
          static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
          __ Cbnz(temp3, intrinsic_slow_path->GetEntryLabel());
        }

        if (!optimizations.GetSourceIsNonPrimitiveArray()) {
          // Bail out if the source is not a non-primitive array.
          // /* HeapReference<Class> */ temp3 = temp2->component_type_
          __ Ldr(temp3, HeapOperand(temp2, component_offset));
          __ Cbz(temp3, intrinsic_slow_path->GetEntryLabel());
          codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
          // /* uint16_t */ temp3 = static_cast<uint16>(temp3->primitive_type_);
          __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
          static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
          __ Cbnz(temp3, intrinsic_slow_path->GetEntryLabel());
        }

        __ Cmp(temp1, temp2);

        if (optimizations.GetDestinationIsTypedObjectArray()) {
          vixl::aarch64::Label do_copy;
          __ B(&do_copy, eq);
          if (!did_unpoison) {
            codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
          }
          // /* HeapReference<Class> */ temp1 = temp1->component_type_
          __ Ldr(temp1, HeapOperand(temp1, component_offset));
          codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ Ldr(temp1, HeapOperand(temp1, super_offset));
          // No need to unpoison the result, we're comparing against null.
          __ Cbnz(temp1, intrinsic_slow_path->GetEntryLabel());
          __ Bind(&do_copy);
        } else {
          __ B(intrinsic_slow_path->GetEntryLabel(), ne);
        }
      }
    } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
      // Bail out if the source is not a non-primitive array.
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // /* HeapReference<Class> */ temp1 = src->klass_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke,
                                                        temp1_loc,
                                                        src.W(),
                                                        class_offset,
                                                        temp2,
                                                        /* needs_null_check */ false,
                                                        /* use_load_acquire */ false);
        // /* HeapReference<Class> */ temp2 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke,
                                                        temp2_loc,
                                                        temp1,
                                                        component_offset,
                                                        temp3,
                                                        /* needs_null_check */ false,
                                                        /* use_load_acquire */ false);
        __ Cbz(temp2, intrinsic_slow_path->GetEntryLabel());
        // If heap poisoning is enabled, `temp2` has been unpoisoned
        // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
      } else {
        // /* HeapReference<Class> */ temp1 = src->klass_
        __ Ldr(temp1, HeapOperand(src.W(), class_offset));
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        // /* HeapReference<Class> */ temp2 = temp1->component_type_
        __ Ldr(temp2, HeapOperand(temp1, component_offset));
        __ Cbz(temp2, intrinsic_slow_path->GetEntryLabel());
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp2);
      }
      // /* uint16_t */ temp2 = static_cast<uint16>(temp2->primitive_type_);
      __ Ldrh(temp2, HeapOperand(temp2, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(temp2, intrinsic_slow_path->GetEntryLabel());
    }

    Register src_curr_addr = temp1.X();
    Register dst_curr_addr = temp2.X();
    Register src_stop_addr;
    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // Temporary register IP0, obtained from the VIXL scratch
      // register pool as `temp3`, cannot be used in
      // ReadBarrierSystemArrayCopySlowPathARM64 (because that
      // register is clobbered by ReadBarrierMarkRegX entry points).
      // So another temporary register allocated by the register
      // allocator is used instead.
      DCHECK_EQ(LocationFrom(temp3).reg(), IP0);
      src_stop_addr = XRegisterFrom(locations->GetTemp(2));
    } else {
      src_stop_addr = temp3.X();
    }

    GenSystemArrayCopyAddresses(masm,
                                Primitive::kPrimNot,
                                src,
                                src_pos,
                                dest,
                                dest_pos,
                                length,
                                src_curr_addr,
                                dst_curr_addr,
                                src_stop_addr);

    const int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // SystemArrayCopy implementation for Baker read barriers (see
      // also CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier):
      //
      //   if (src_ptr != end_ptr) {
      //     uint32_t rb_state = LockWord(src->monitor_).ReadBarrierState();
      //     lfence;  // Load fence or artificial data dependency to prevent load-load reordering
      //     bool is_gray = (rb_state == ReadBarrier::GrayState());
      //     if (is_gray) {
      //       // Slow-path copy.
      //       do {
      //         *dest_ptr++ = MaybePoison(ReadBarrier::Mark(MaybeUnpoison(*src_ptr++)));
      //       } while (src_ptr != end_ptr)
      //     } else {
      //       // Fast-path copy.
      //       do {
      //         *dest_ptr++ = *src_ptr++;
      //       } while (src_ptr != end_ptr)
      //     }
      //   }

      vixl::aarch64::Label loop, done;

      // Don't enter the copy loop if `length == 0`.
      __ Cmp(src_curr_addr, src_stop_addr);
      __ B(&done, eq);

      Register tmp = temps.AcquireW();
      // Make sure `tmp` is not IP0, as it is clobbered by
      // ReadBarrierMarkRegX entry points in
      // ReadBarrierSystemArrayCopySlowPathARM64.
      DCHECK_NE(LocationFrom(tmp).reg(), IP0);

      // /* int32_t */ monitor = src->monitor_
      __ Ldr(tmp, HeapOperand(src.W(), monitor_offset));
      // /* LockWord */ lock_word = LockWord(monitor)
      static_assert(sizeof(LockWord) == sizeof(int32_t),
                    "art::LockWord and int32_t have different sizes.");

      // Introduce a dependency on the lock_word including rb_state,
      // to prevent load-load reordering, and without using
      // a memory barrier (which would be more expensive).
      // `src` is unchanged by this operation, but its value now depends
      // on `tmp`.
      __ Add(src.X(), src.X(), Operand(tmp.X(), LSR, 32));

      // Slow path used to copy the array when `src` is gray.
      SlowPathCodeARM64* read_barrier_slow_path =
          new (GetAllocator()) ReadBarrierSystemArrayCopySlowPathARM64(invoke, LocationFrom(tmp));
      codegen_->AddSlowPath(read_barrier_slow_path);

      // Given the numeric representation, it's enough to check the low bit of the rb_state.
      static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
      static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
      __ Tbnz(tmp, LockWord::kReadBarrierStateShift, read_barrier_slow_path->GetEntryLabel());

      // Fast-path copy.
      // Iterate over the arrays and do a raw copy of the objects. We don't need to
      // poison/unpoison.
      __ Bind(&loop);
      __ Ldr(tmp, MemOperand(src_curr_addr, element_size, PostIndex));
      __ Str(tmp, MemOperand(dst_curr_addr, element_size, PostIndex));
      __ Cmp(src_curr_addr, src_stop_addr);
      __ B(&loop, ne);

      __ Bind(read_barrier_slow_path->GetExitLabel());
      __ Bind(&done);
    } else {
      // Non read barrier code.

      // Iterate over the arrays and do a raw copy of the objects. We don't need to
      // poison/unpoison.
      vixl::aarch64::Label loop, done;
      __ Bind(&loop);
      __ Cmp(src_curr_addr, src_stop_addr);
      __ B(&done, eq);
      {
        Register tmp = temps.AcquireW();
        __ Ldr(tmp, MemOperand(src_curr_addr, element_size, PostIndex));
        __ Str(tmp, MemOperand(dst_curr_addr, element_size, PostIndex));
      }
      __ B(&loop);
      __ Bind(&done);
    }
  }
  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(dest.W(), Register(), /* value_can_be_null */ false);

  __ Bind(intrinsic_slow_path->GetExitLabel());
}
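
// As a rough illustration of the fast/slow split above: a copy between two distinct,
// same-typed Object[] arrays with in-bounds positions and length < 128 runs entirely on
// the inline path, while negative positions, failed type checks, or a same-array copy
// that would need forward copying all branch to `intrinsic_slow_path`.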

static void GenIsInfinite(LocationSummary* locations,
                          bool is64bit,
                          MacroAssembler* masm) {
  Operand infinity;
  Register out;

  if (is64bit) {
    infinity = kPositiveInfinityDouble;
    out = XRegisterFrom(locations->Out());
  } else {
    infinity = kPositiveInfinityFloat;
    out = WRegisterFrom(locations->Out());
  }

  const Register zero = vixl::aarch64::Assembler::AppropriateZeroRegFor(out);

  MoveFPToInt(locations, is64bit, masm);
  __ Eor(out, out, infinity);
  // We don't care about the sign bit, so shift left.
  __ Cmp(zero, Operand(out, LSL, 1));
  __ Cset(out, eq);
}
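
// Worked example of the bit trick above, for the single-precision case: +inf is
// 0x7f800000 and -inf is 0xff800000, so `out ^ kPositiveInfinityFloat` is 0x00000000 or
// 0x80000000, i.e. at most the sign bit. Shifting left by one discards that bit, the
// comparison against zero then succeeds exactly for +/-infinity, and Cset writes 1.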

void IntrinsicLocationsBuilderARM64::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitReferenceGetReferent(HInvoke* invoke) {
  if (kEmitCompilerReadBarrier) {
    // Do not intrinsify this call with the read barrier configuration.
    return;
  }
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitReferenceGetReferent(HInvoke* invoke) {
  DCHECK(!kEmitCompilerReadBarrier);
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register obj = InputRegisterAt(invoke, 0);
  Register out = OutputRegister(invoke);

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load ArtMethod first.
  HInvokeStaticOrDirect* invoke_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(invoke_direct != nullptr);
  Register temp0 = XRegisterFrom(codegen_->GenerateCalleeMethodStaticOrDirectCall(
                                 invoke_direct, locations->GetTemp(0)));

  // Now get the declaring class.
  __ Ldr(temp0.W(), MemOperand(temp0, ArtMethod::DeclaringClassOffset().Int32Value()));

  uint32_t slow_path_flag_offset = codegen_->GetReferenceSlowFlagOffset();
  uint32_t disable_flag_offset = codegen_->GetReferenceDisableFlagOffset();
  DCHECK_NE(slow_path_flag_offset, 0u);
  DCHECK_NE(disable_flag_offset, 0u);
  DCHECK_NE(slow_path_flag_offset, disable_flag_offset);

  // Check the static flags that prevent using the intrinsic.
  if (slow_path_flag_offset == disable_flag_offset + 1) {
    // Load two adjacent flags in one 64-bit load.
    __ Ldr(temp0, MemOperand(temp0, disable_flag_offset));
  } else {
    UseScratchRegisterScope temps(masm);
    Register temp1 = temps.AcquireW();
    __ Ldr(temp1.W(), MemOperand(temp0, disable_flag_offset));
    __ Ldr(temp0.W(), MemOperand(temp0, slow_path_flag_offset));
    __ Orr(temp0, temp1, temp0);
  }
  __ Cbnz(temp0, slow_path->GetEntryLabel());

  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    vixl::EmissionCheckScope guard(codegen_->GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    // Fast path.
    __ Ldr(out, HeapOperand(obj, mirror::Reference::ReferentOffset().Int32Value()));
    codegen_->MaybeRecordImplicitNullCheck(invoke);
  }
  codegen_->GetAssembler()->MaybeUnpoisonHeapReference(out);
  __ Bind(slow_path->GetExitLabel());
}
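
// A note on the flag check above (a sketch of the intent, assuming byte-sized flags):
// when the two flags are adjacent (offsets differ by one), a single 64-bit Ldr covers
// both and the Cbnz branches whenever anything in the loaded word is set; otherwise the
// flags are loaded separately and Orr-ed together before the same test. The test is
// conservative either way: a non-zero value simply falls back to the slow path.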

void IntrinsicLocationsBuilderARM64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(Primitive::kPrimNot),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
}

void IntrinsicCodeGeneratorARM64::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = GetVIXLAssembler();

  Register out = RegisterFrom(locations->Out(), Primitive::kPrimNot);
  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  InvokeRuntimeCallingConvention calling_convention;
  Register argument = calling_convention.GetRegisterAt(0);
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ Ldr(argument.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectInitialized, void*, mirror::Class*>();
      __ Mov(temp.W(), value);
      __ Str(temp.W(), HeapOperand(out.W(), info.value_offset));
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the
      // allocation one.
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    Register in = RegisterFrom(locations->InAt(0), Primitive::kPrimInt);
    // Check the bounds of our cache.
    __ Add(out.W(), in.W(), -info.low);
    __ Cmp(out.W(), info.high - info.low + 1);
    vixl::aarch64::Label allocate, done;
    __ B(&allocate, hs);
    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ Ldr(temp.W(), codegen_->DeduplicateBootImageAddressLiteral(data_offset + address));
    MemOperand source = HeapOperand(
        temp, out.X(), LSL, Primitive::ComponentSizeShift(Primitive::kPrimNot));
    codegen_->Load(Primitive::kPrimNot, out, source);
    codegen_->GetAssembler()->MaybeUnpoisonHeapReference(out);
    __ B(&done);
    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ Ldr(argument.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectInitialized, void*, mirror::Class*>();
    __ Str(in.W(), HeapOperand(out.W(), info.value_offset));
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the
    // allocation one.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
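
// For reference, the two paths above mirror the Java-level semantics of
// Integer.valueOf(int): values within [info.low, info.high] (by default -128..127 for the
// cached range) resolve to the pre-allocated boxed instance, everything else allocates a
// fresh j.l.Integer.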

UNIMPLEMENTED_INTRINSIC(ARM64, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, LongLowestOneBit)

UNIMPLEMENTED_INTRINSIC(ARM64, StringStringIndexOf)
UNIMPLEMENTED_INTRINSIC(ARM64, StringStringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ARM64, StringBufferAppend)
UNIMPLEMENTED_INTRINSIC(ARM64, StringBufferLength)
UNIMPLEMENTED_INTRINSIC(ARM64, StringBufferToString)
UNIMPLEMENTED_INTRINSIC(ARM64, StringBuilderAppend)
UNIMPLEMENTED_INTRINSIC(ARM64, StringBuilderLength)
UNIMPLEMENTED_INTRINSIC(ARM64, StringBuilderToString)

// 1.8.
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM64)

#undef __

}  // namespace arm64
}  // namespace art