/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "data_type-inl.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/reference.h"
#include "mirror/string.h"
#include "mirror/var_handle.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
    : allocator_(codegen->GetGraph()->GetAllocator()),
      codegen_(codegen) {
}


X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return down_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetAllocator();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT

// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathX86 : public SlowPathCode {
 public:
  explicit ReadBarrierSystemArrayCopySlowPathX86(HInstruction* instruction)
      : SlowPathCode(instruction) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(instruction_->IsInvokeStaticOrDirect())
        << "Unexpected instruction in read barrier arraycopy slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);

    int32_t element_size = DataType::Size(DataType::Type::kReference);
    uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();

    Register src = locations->InAt(0).AsRegister<Register>();
    Location src_pos = locations->InAt(1);
    Register dest = locations->InAt(2).AsRegister<Register>();
    Location dest_pos = locations->InAt(3);
    Location length = locations->InAt(4);
    Location temp1_loc = locations->GetTemp(0);
    Register temp1 = temp1_loc.AsRegister<Register>();
    Register temp2 = locations->GetTemp(1).AsRegister<Register>();
    Register temp3 = locations->GetTemp(2).AsRegister<Register>();

    __ Bind(GetEntryLabel());
    // In this code path, registers `temp1`, `temp2`, and `temp3` are not used
    // for the base source address, the base destination address, and the end
    // source address, as they are in other SystemArrayCopy intrinsic code
    // paths. Instead they are used for:
    // - the loop index (`i`);
    // - the source index (`src_index`) and the loaded (source) reference (`value`); and
    // - the destination index (`dest_index`).

    // i = 0
    __ xorl(temp1, temp1);
    NearLabel loop;
    __ Bind(&loop);
    // value = src_array[i + src_pos]
    if (src_pos.IsConstant()) {
      int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset));
    } else {
      __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset));
    }
    __ MaybeUnpoisonHeapReference(temp2);
    // TODO: Inline the mark bit check before calling the runtime?
    // value = ReadBarrier::Mark(value)
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    // (See ReadBarrierMarkSlowPathX86::EmitNativeCode for more
    // explanations.)
    DCHECK_NE(temp2, ESP);
    DCHECK(0 <= temp2 && temp2 < kNumberOfCpuRegisters) << temp2;
    int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(temp2);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ MaybePoisonHeapReference(temp2);
    // dest_array[i + dest_pos] = value
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2);
    } else {
      __ leal(temp3, Address(dest_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(Address(dest, temp3, ScaleFactor::TIMES_4, offset), temp2);
    }
    // ++i
    __ addl(temp1, Immediate(1));
    // if (i != length) goto loop
    x86_codegen->GenerateIntCompare(temp1_loc, length);
    __ j(kNotEqual, &loop);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierSystemArrayCopySlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathX86);
};

#undef __

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
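    // Copy the double into the XMM temporary, move the low 32 bits out with movd,
    // then shift the temporary right by 32 to expose the high 32 bits.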
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
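    // Load the low and high halves into two XMM temporaries and interleave them
    // with punpckldq to form the 64-bit value.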
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke, /* is64bit= */ true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke, /* is64bit= */ true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke, /* is64bit= */ false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke, /* is64bit= */ false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            DataType::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case DataType::Type::kInt16:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
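      // bswapl reverses all four bytes; the arithmetic shift then moves the
      // reversed 16-bit value into the low half while sign-extending the result.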
279 __ bswapl(out);
280 __ sarl(out, Immediate(16));
281 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100282 case DataType::Type::kInt32:
Mark Mendell09ed1a32015-03-25 08:30:06 -0400283 __ bswapl(out);
284 break;
285 default:
286 LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
287 UNREACHABLE();
288 }
289}
290
291void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100292 CreateIntToIntLocations(allocator_, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -0400293}
294
295void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100296 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -0400297}
298
Mark Mendell58d25fd2015-04-03 14:52:31 -0400299void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100300 CreateLongToLongLocations(allocator_, invoke);
Mark Mendell58d25fd2015-04-03 14:52:31 -0400301}
302
303void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
304 LocationSummary* locations = invoke->GetLocations();
305 Location input = locations->InAt(0);
306 Register input_lo = input.AsRegisterPairLow<Register>();
307 Register input_hi = input.AsRegisterPairHigh<Register>();
308 Location output = locations->Out();
309 Register output_lo = output.AsRegisterPairLow<Register>();
310 Register output_hi = output.AsRegisterPairHigh<Register>();
311
312 X86Assembler* assembler = GetAssembler();
313 // Assign the inputs to the outputs, mixing low/high.
314 __ movl(output_lo, input_hi);
315 __ movl(output_hi, input_lo);
316 __ bswapl(output_lo);
317 __ bswapl(output_hi);
318}
319
Mark Mendell09ed1a32015-03-25 08:30:06 -0400320void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100321 CreateIntToIntLocations(allocator_, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -0400322}
323
324void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100325 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -0400326}
327
Vladimir Markoca6fff82017-10-03 14:49:14 +0100328static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
329 LocationSummary* locations =
330 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Mark Mendell09ed1a32015-03-25 08:30:06 -0400331 locations->SetInAt(0, Location::RequiresFpuRegister());
332 locations->SetOut(Location::RequiresFpuRegister());
333}
334
335void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100336 CreateFPToFPLocations(allocator_, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -0400337}
338
339void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
340 LocationSummary* locations = invoke->GetLocations();
341 XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
342 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
343
344 GetAssembler()->sqrtsd(out, in);
345}
346
Vladimir Markoca6fff82017-10-03 14:49:14 +0100347static void CreateSSE41FPToFPLocations(ArenaAllocator* allocator,
348 HInvoke* invoke,
349 CodeGeneratorX86* codegen) {
Mark Mendellfb8d2792015-03-31 22:16:59 -0400350 // Do we have instruction support?
Vladimir Marko66704db2020-06-08 14:04:27 +0100351 if (!codegen->GetInstructionSetFeatures().HasSSE4_1()) {
Mark Mendellfb8d2792015-03-31 22:16:59 -0400352 return;
353 }
354
Vladimir Marko66704db2020-06-08 14:04:27 +0100355 CreateFPToFPLocations(allocator, invoke);
Mark Mendellfb8d2792015-03-31 22:16:59 -0400356}
357
Vladimir Marko66704db2020-06-08 14:04:27 +0100358static void GenSSE41FPToFPIntrinsic(HInvoke* invoke, X86Assembler* assembler, int round_mode) {
Mark Mendellfb8d2792015-03-31 22:16:59 -0400359 LocationSummary* locations = invoke->GetLocations();
Vladimir Marko66704db2020-06-08 14:04:27 +0100360 DCHECK(!locations->WillCall());
361 XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
362 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
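  // The roundsd immediate selects the rounding mode: 0 = round to nearest (rint),
  // 1 = round toward negative infinity (floor), 2 = round toward positive infinity (ceil).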
  __ roundsd(out, in, Immediate(round_mode));
}

void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(allocator_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(allocator_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(allocator_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(invoke, GetAssembler(), 0);
}

void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (!codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    return;
  }

  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(static_or_direct != nullptr);
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  if (static_or_direct->HasSpecialInput() &&
      invoke->InputAt(
          static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresFpuRegister());
  locations->AddTemp(Location::RequiresFpuRegister());
}

void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(!locations->WillCall());

  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister t1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister t2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  NearLabel skip_incr, done;
  X86Assembler* assembler = GetAssembler();

  // Since no direct x86 rounding instruction matches the required semantics,
  // this intrinsic is implemented as follows:
  //  result = floor(in);
  //  if (in - result >= 0.5f)
  //    result = result + 1.0f;
  __ movss(t2, in);
  __ roundss(t1, in, Immediate(1));
  __ subss(t2, t1);
  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
    // Direct constant area available.
    HX86ComputeBaseMethodAddress* method_address =
        invoke->InputAt(1)->AsX86ComputeBaseMethodAddress();
    Register constant_area = locations->InAt(1).AsRegister<Register>();
    __ comiss(t2, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(0.5f),
                                                method_address,
                                                constant_area));
    __ j(kBelow, &skip_incr);
    __ addss(t1, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(1.0f),
                                               method_address,
                                               constant_area));
    __ Bind(&skip_incr);
  } else {
    // No constant area: go through stack.
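    // Push 0.5f and then 1.0f, so after both pushes 0.5f sits at ESP + 4 and 1.0f at ESP + 0.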
    __ pushl(Immediate(bit_cast<int32_t, float>(0.5f)));
    __ pushl(Immediate(bit_cast<int32_t, float>(1.0f)));
    __ comiss(t2, Address(ESP, 4));
    __ j(kBelow, &skip_incr);
    __ addss(t1, Address(ESP, 0));
    __ Bind(&skip_incr);
    __ addl(ESP, Immediate(8));
  }

  // Final conversion to an integer. Unfortunately this also does not have a
  // direct x86 instruction, since NaN should map to 0 and large positive
  // values need to be clipped to the extreme value.
  __ movl(out, Immediate(kPrimIntMax));
  __ cvtsi2ss(t2, out);
  __ comiss(t1, t2);
  __ j(kAboveEqual, &done);  // clipped to max (already in out), does not jump on unordered
  __ movl(out, Immediate(0));  // does not change flags
  __ j(kUnordered, &done);  // NaN mapped to 0 (just moved in out)
  __ cvttss2si(out, t1);
  __ Bind(&done);
}

static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorX86* codegen, QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(locations->WillCall());
  DCHECK(invoke->IsInvokeStaticOrDirect());
  X86Assembler* assembler = codegen->GetAssembler();

  // We need some place to pass the parameters.
  __ subl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(16);

  // Pass the parameters at the bottom of the stack.
  __ movsd(Address(ESP, 0), XMM0);

  // If we have a second parameter, pass it next.
  if (invoke->GetNumberOfArguments() == 2) {
    __ movsd(Address(ESP, 8), XMM1);
  }

  // Now do the actual call.
  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());

  // Extract the return value from the FP stack.
  __ fstpl(Address(ESP, 0));
  __ movsd(XMM0, Address(ESP, 0));

  // And clean up the stack.
  __ addl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(-16);
}

static void CreateLowestOneBitLocations(ArenaAllocator* allocator, bool is_long, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenLowestOneBit(X86Assembler* assembler,
                            CodeGeneratorX86* codegen,
                            bool is_long,
                            HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Location out_loc = locations->Out();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      if (is_long) {
        __ xorl(out_loc.AsRegisterPairLow<Register>(), out_loc.AsRegisterPairLow<Register>());
        __ xorl(out_loc.AsRegisterPairHigh<Register>(), out_loc.AsRegisterPairHigh<Register>());
      } else {
        __ xorl(out_loc.AsRegister<Register>(), out_loc.AsRegister<Register>());
      }
      return;
    }
    // Nonzero value.
    value = is_long ? CTZ(static_cast<uint64_t>(value))
                    : CTZ(static_cast<uint32_t>(value));
    if (is_long) {
      if (value >= 32) {
        int shift = value - 32;
        codegen->Load32BitValue(out_loc.AsRegisterPairLow<Register>(), 0);
        codegen->Load32BitValue(out_loc.AsRegisterPairHigh<Register>(), 1 << shift);
      } else {
        codegen->Load32BitValue(out_loc.AsRegisterPairLow<Register>(), 1 << value);
        codegen->Load32BitValue(out_loc.AsRegisterPairHigh<Register>(), 0);
      }
    } else {
      codegen->Load32BitValue(out_loc.AsRegister<Register>(), 1 << value);
    }
    return;
  }
  // Handle non constant case
  if (is_long) {
    DCHECK(src.IsRegisterPair());
    Register src_lo = src.AsRegisterPairLow<Register>();
    Register src_hi = src.AsRegisterPairHigh<Register>();

    Register out_lo = out_loc.AsRegisterPairLow<Register>();
    Register out_hi = out_loc.AsRegisterPairHigh<Register>();

    __ movl(out_lo, src_lo);
    __ movl(out_hi, src_hi);

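    // Compute -src into the output pair (two's complement negation of the 64-bit
    // value split across two registers), then AND with src so that only the
    // lowest set bit survives: out = src & -src.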
    __ negl(out_lo);
    __ adcl(out_hi, Immediate(0));
    __ negl(out_hi);

    __ andl(out_lo, src_lo);
    __ andl(out_hi, src_hi);
  } else {
    if (codegen->GetInstructionSetFeatures().HasAVX2() && src.IsRegister()) {
      Register out = out_loc.AsRegister<Register>();
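      // blsi computes out = src & -src in a single instruction, isolating the lowest set bit.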
      __ blsi(out, src.AsRegister<Register>());
    } else {
      Register out = out_loc.AsRegister<Register>();
      // Do tmp & -tmp
      if (src.IsRegister()) {
        __ movl(out, src.AsRegister<Register>());
      } else {
        DCHECK(src.IsStackSlot());
        __ movl(out, Address(ESP, src.GetStackIndex()));
      }
      __ negl(out);

      if (src.IsRegister()) {
        __ andl(out, src.AsRegister<Register>());
      } else {
        __ andl(out, Address(ESP, src.GetStackIndex()));
      }
    }
  }
}

void IntrinsicLocationsBuilderX86::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderX86::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderX86::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderX86::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderX86::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderX86::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderX86::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderX86::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderX86::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderX86::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderX86::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderX86::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderX86::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderX86::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderX86::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateLowestOneBitLocations(allocator_, /*is_long=*/ false, invoke);
}
void IntrinsicCodeGeneratorX86::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(GetAssembler(), codegen_, /*is_long=*/ false, invoke);
}

void IntrinsicLocationsBuilderX86::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateLowestOneBitLocations(allocator_, /*is_long=*/ true, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(GetAssembler(), codegen_, /*is_long=*/ true, invoke);
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

void IntrinsicLocationsBuilderX86::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderX86::VisitMathPow(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathPow(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickPow);
}

void IntrinsicLocationsBuilderX86::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderX86::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyways.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

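// Check that `pos` and `length` denote a valid region within the `input` array,
// branching to `slow_path` if they do not.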
static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        if (length.IsConstant()) {
          __ cmpl(Address(input, length_offset),
                  Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmpl(Address(input, length_offset), length.AsRegister<Register>());
        }
        __ j(kLess, slow_path->GetEntryLabel());
      }
    } else {
      // Check that length(input) >= pos.
      __ movl(temp, Address(input, length_offset));
      __ subl(temp, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmpl(temp, length.AsRegister<Register>());
      }
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kNotEqual, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    if (length.IsConstant()) {
      __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpl(temp, length.AsRegister<Register>());
    }
    __ j(kLess, slow_path->GetEntryLabel());
  }
}

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location srcPos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location destPos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSW.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
914 __ j(kLess, slow_path->GetEntryLabel());
915 }
916
917 // We need the count in ECX.
918 if (length.IsConstant()) {
919 __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
920 } else {
921 __ movl(count, length.AsRegister<Register>());
922 }
923
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +0100924 // Validity checks: source. Use src_base as a temporary register.
925 CheckPosition(assembler, srcPos, src, Location::RegisterLocation(count), slow_path, src_base);
Mark Mendell6bc53a92015-07-01 14:26:52 -0400926
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +0100927 // Validity checks: dest. Use src_base as a temporary register.
928 CheckPosition(assembler, destPos, dest, Location::RegisterLocation(count), slow_path, src_base);
Mark Mendell6bc53a92015-07-01 14:26:52 -0400929
930 // Okay, everything checks out. Finally time to do the copy.
931 // Check assumption that sizeof(Char) is 2 (used in scaling below).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100932 const size_t char_size = DataType::Size(DataType::Type::kUint16);
Mark Mendell6bc53a92015-07-01 14:26:52 -0400933 DCHECK_EQ(char_size, 2u);
934
935 const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();
936
937 if (srcPos.IsConstant()) {
938 int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
939 __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
940 } else {
941 __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
942 ScaleFactor::TIMES_2, data_offset));
943 }
944 if (destPos.IsConstant()) {
945 int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();
946
947 __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
948 } else {
949 __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
950 ScaleFactor::TIMES_2, data_offset));
951 }
952
953 // Do the move.
954 __ rep_movsw();
955
956 __ Bind(slow_path->GetExitLabel());
957}
958
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +0000959void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
960 // The inputs plus one temp.
Vladimir Markoca6fff82017-10-03 14:49:14 +0100961 LocationSummary* locations = new (allocator_) LocationSummary(
962 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +0000963 InvokeRuntimeCallingConvention calling_convention;
964 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
965 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
966 locations->SetOut(Location::RegisterLocation(EAX));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +0000967}
968
969void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
970 X86Assembler* assembler = GetAssembler();
971 LocationSummary* locations = invoke->GetLocations();
972
Nicolas Geoffray512e04d2015-03-27 17:21:24 +0000973 // Note that the null check must have been done earlier.
Calin Juravle641547a2015-04-21 22:08:51 +0100974 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +0000975
976 Register argument = locations->InAt(1).AsRegister<Register>();
977 __ testl(argument, argument);
Vladimir Marko174b2e22017-10-12 13:34:49 +0100978 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +0000979 codegen_->AddSlowPath(slow_path);
980 __ j(kEqual, slow_path->GetEntryLabel());
981
Serban Constantinescuba45db02016-07-12 22:53:02 +0100982 codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +0000983 __ Bind(slow_path->GetExitLabel());
984}
985
Agi Csakid7138c82015-08-13 17:46:44 -0700986void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100987 LocationSummary* locations =
988 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Agi Csakid7138c82015-08-13 17:46:44 -0700989 locations->SetInAt(0, Location::RequiresRegister());
990 locations->SetInAt(1, Location::RequiresRegister());
991
992 // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
993 locations->AddTemp(Location::RegisterLocation(ECX));
994 locations->AddTemp(Location::RegisterLocation(EDI));
995
996 // Set output, ESI needed for repe_cmpsl instruction anyways.
997 locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
998}
999
1000void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
1001 X86Assembler* assembler = GetAssembler();
1002 LocationSummary* locations = invoke->GetLocations();
1003
1004 Register str = locations->InAt(0).AsRegister<Register>();
1005 Register arg = locations->InAt(1).AsRegister<Register>();
1006 Register ecx = locations->GetTemp(0).AsRegister<Register>();
1007 Register edi = locations->GetTemp(1).AsRegister<Register>();
1008 Register esi = locations->Out().AsRegister<Register>();
1009
Mark Mendell0c9497d2015-08-21 09:30:05 -04001010 NearLabel end, return_true, return_false;
Agi Csakid7138c82015-08-13 17:46:44 -07001011
1012 // Get offsets of count, value, and class fields within a string object.
1013 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
1014 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1015 const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
1016
1017 // Note that the null check must have been done earlier.
1018 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1019
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001020 StringEqualsOptimizations optimizations(invoke);
1021 if (!optimizations.GetArgumentNotNull()) {
1022 // Check if input is null, return false if it is.
1023 __ testl(arg, arg);
1024 __ j(kEqual, &return_false);
1025 }
Agi Csakid7138c82015-08-13 17:46:44 -07001026
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001027 if (!optimizations.GetArgumentIsString()) {
Vladimir Marko53b52002016-05-24 19:30:45 +01001028 // Instanceof check for the argument by comparing class fields.
1029 // All string objects must have the same type since String cannot be subclassed.
1030 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1031 // If the argument is a string object, its class field must be equal to receiver's class field.
Roland Levillain1d775d22018-09-07 13:56:57 +01001032 //
1033 // As the String class is expected to be non-movable, we can read the class
1034 // field from String.equals' arguments without read barriers.
1035 AssertNonMovableStringClass();
1036 // Also, because we use the loaded class references only to compare them, we
1037 // don't need to unpoison them.
1038 // /* HeapReference<Class> */ ecx = str->klass_
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001039 __ movl(ecx, Address(str, class_offset));
Roland Levillain1d775d22018-09-07 13:56:57 +01001040 // if (ecx != /* HeapReference<Class> */ arg->klass_) return false
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001041 __ cmpl(ecx, Address(arg, class_offset));
1042 __ j(kNotEqual, &return_false);
1043 }
Agi Csakid7138c82015-08-13 17:46:44 -07001044
1045 // Reference equality check, return true if same reference.
1046 __ cmpl(str, arg);
1047 __ j(kEqual, &return_true);
1048
jessicahandojo4877b792016-09-08 19:49:13 -07001049 // Load length and compression flag of receiver string.
Agi Csakid7138c82015-08-13 17:46:44 -07001050 __ movl(ecx, Address(str, count_offset));
jessicahandojo4877b792016-09-08 19:49:13 -07001051 // Check if lengths and compression flags are equal, return false if they're not.
1052 // Two identical strings will always have same compression style since
1053 // compression style is decided on alloc.
Agi Csakid7138c82015-08-13 17:46:44 -07001054 __ cmpl(ecx, Address(arg, count_offset));
1055 __ j(kNotEqual, &return_false);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001056 // Return true if strings are empty. Even with string compression `count == 0` means empty.
1057 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1058 "Expecting 0=compressed, 1=uncompressed");
1059 __ jecxz(&return_true);
Agi Csakid7138c82015-08-13 17:46:44 -07001060
jessicahandojo4877b792016-09-08 19:49:13 -07001061 if (mirror::kUseStringCompression) {
1062 NearLabel string_uncompressed;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001063 // Extract length and differentiate between both compressed or both uncompressed.
1064 // Different compression style is cut above.
1065 __ shrl(ecx, Immediate(1));
1066 __ j(kCarrySet, &string_uncompressed);
jessicahandojo4877b792016-09-08 19:49:13 -07001067 // Divide string length by 2, rounding up, and continue as if uncompressed.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001068 __ addl(ecx, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07001069 __ shrl(ecx, Immediate(1));
1070 __ Bind(&string_uncompressed);
1071 }
Agi Csakid7138c82015-08-13 17:46:44 -07001072 // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
1073 __ leal(esi, Address(str, value_offset));
1074 __ leal(edi, Address(arg, value_offset));
1075
jessicahandojo4877b792016-09-08 19:49:13 -07001076 // Divide string length by 2 to compare characters 2 at a time and adjust for lengths not
1077 // divisible by 2.
Agi Csakid7138c82015-08-13 17:46:44 -07001078 __ addl(ecx, Immediate(1));
1079 __ shrl(ecx, Immediate(1));
1080
jessicahandojo4877b792016-09-08 19:49:13 -07001081 // Assertions that must hold in order to compare strings 2 characters (uncompressed)
1082 // or 4 characters (compressed) at a time.
Agi Csakid7138c82015-08-13 17:46:44 -07001083 DCHECK_ALIGNED(value_offset, 4);
1084 static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");
1085
1086 // Loop to compare strings two characters at a time starting at the beginning of the string.
1087 __ repe_cmpsl();
1088 // If strings are not equal, zero flag will be cleared.
1089 __ j(kNotEqual, &return_false);
1090
1091 // Return true and exit the function.
1092 // If loop does not result in returning false, we return true.
1093 __ Bind(&return_true);
1094 __ movl(esi, Immediate(1));
1095 __ jmp(&end);
1096
1097 // Return false and exit the function.
1098 __ Bind(&return_false);
1099 __ xorl(esi, esi);
1100 __ Bind(&end);
1101}
1102
Andreas Gampe21030dd2015-05-07 14:46:15 -07001103static void CreateStringIndexOfLocations(HInvoke* invoke,
1104 ArenaAllocator* allocator,
1105 bool start_at_zero) {
1106 LocationSummary* locations = new (allocator) LocationSummary(invoke,
1107 LocationSummary::kCallOnSlowPath,
1108 kIntrinsified);
1109 // The data needs to be in EDI for scasw. So request that the string is there, anyways.
1110 locations->SetInAt(0, Location::RegisterLocation(EDI));
1111 // If we look for a constant char, we'll still have to copy it into EAX. So just request the
1112 // allocator to do that, anyways. We can still do the constant check by checking the parameter
1113 // of the instruction explicitly.
1114 // Note: This works as we don't clobber EAX anywhere.
1115 locations->SetInAt(1, Location::RegisterLocation(EAX));
1116 if (!start_at_zero) {
1117 locations->SetInAt(2, Location::RequiresRegister()); // The starting index.
1118 }
1119 // As we clobber EDI during execution anyways, also use it as the output.
1120 locations->SetOut(Location::SameAsFirstInput());
1121
1122 // repne scasw uses ECX as the counter.
1123 locations->AddTemp(Location::RegisterLocation(ECX));
1124 // Need another temporary to be able to compute the result.
1125 locations->AddTemp(Location::RequiresRegister());
jessicahandojo4877b792016-09-08 19:49:13 -07001126 if (mirror::kUseStringCompression) {
1127 // Need another temporary to be able to save unflagged string length.
1128 locations->AddTemp(Location::RequiresRegister());
1129 }
Andreas Gampe21030dd2015-05-07 14:46:15 -07001130}
1131
1132static void GenerateStringIndexOf(HInvoke* invoke,
1133 X86Assembler* assembler,
1134 CodeGeneratorX86* codegen,
Andreas Gampe21030dd2015-05-07 14:46:15 -07001135 bool start_at_zero) {
1136 LocationSummary* locations = invoke->GetLocations();
1137
1138 // Note that the null check must have been done earlier.
1139 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1140
1141 Register string_obj = locations->InAt(0).AsRegister<Register>();
1142 Register search_value = locations->InAt(1).AsRegister<Register>();
1143 Register counter = locations->GetTemp(0).AsRegister<Register>();
1144 Register string_length = locations->GetTemp(1).AsRegister<Register>();
1145 Register out = locations->Out().AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07001146 // Only used when string compression feature is on.
1147 Register string_length_flagged;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001148
1149 // Check our assumptions for registers.
1150 DCHECK_EQ(string_obj, EDI);
1151 DCHECK_EQ(search_value, EAX);
1152 DCHECK_EQ(counter, ECX);
1153 DCHECK_EQ(out, EDI);
1154
1155 // Check for code points > 0xFFFF: either a slow-path check when the value is not known statically,
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001156 // a direct dispatch to the slow path for a large constant, or no check at all for a small constant or a char.
Andreas Gampe85b62f22015-09-09 13:15:38 -07001157 SlowPathCode* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001158 HInstruction* code_point = invoke->InputAt(1);
1159 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001160 if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
Andreas Gampe21030dd2015-05-07 14:46:15 -07001161 std::numeric_limits<uint16_t>::max()) {
1162 // Always needs the slow-path. We could directly dispatch to it, but this case should be
1163 // rare, so for simplicity just put the full slow-path down and branch unconditionally.
Vladimir Marko174b2e22017-10-12 13:34:49 +01001164 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001165 codegen->AddSlowPath(slow_path);
1166 __ jmp(slow_path->GetEntryLabel());
1167 __ Bind(slow_path->GetExitLabel());
1168 return;
1169 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001170 } else if (code_point->GetType() != DataType::Type::kUint16) {
Andreas Gampe21030dd2015-05-07 14:46:15 -07001171 __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
Vladimir Marko174b2e22017-10-12 13:34:49 +01001172 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001173 codegen->AddSlowPath(slow_path);
1174 __ j(kAbove, slow_path->GetEntryLabel());
1175 }
1176
1177 // From here down, we know that we are looking for a char that fits in 16 bits.
1178 // Location of reference to data array within the String object.
1179 int32_t value_offset = mirror::String::ValueOffset().Int32Value();
1180 // Location of count within the String object.
1181 int32_t count_offset = mirror::String::CountOffset().Int32Value();
1182
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001183 // Load the count field of the string containing the length and compression flag.
Andreas Gampe21030dd2015-05-07 14:46:15 -07001184 __ movl(string_length, Address(string_obj, count_offset));
1185
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001186 // Do a zero-length check. Even with string compression `count == 0` means empty.
1187 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1188 "Expecting 0=compressed, 1=uncompressed");
Andreas Gampe21030dd2015-05-07 14:46:15 -07001189 // TODO: Support jecxz.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001190 NearLabel not_found_label;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001191 __ testl(string_length, string_length);
1192 __ j(kEqual, &not_found_label);
1193
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001194 if (mirror::kUseStringCompression) {
1195 string_length_flagged = locations->GetTemp(2).AsRegister<Register>();
1196 __ movl(string_length_flagged, string_length);
1197 // Extract the length and shift out the least significant bit used as compression flag.
1198 __ shrl(string_length, Immediate(1));
1199 }
1200
Andreas Gampe21030dd2015-05-07 14:46:15 -07001201 if (start_at_zero) {
1202 // Number of chars to scan is the same as the string length.
1203 __ movl(counter, string_length);
1204
1205 // Move to the start of the string.
1206 __ addl(string_obj, Immediate(value_offset));
1207 } else {
1208 Register start_index = locations->InAt(2).AsRegister<Register>();
1209
1210 // Do a start_index check.
1211 __ cmpl(start_index, string_length);
1212 __ j(kGreaterEqual, &not_found_label);
1213
1214 // Ensure the start index is >= 0 (clamp negative values to zero).
1215 __ xorl(counter, counter);
1216 __ cmpl(start_index, Immediate(0));
1217 __ cmovl(kGreater, counter, start_index);
1218
jessicahandojo4877b792016-09-08 19:49:13 -07001219 if (mirror::kUseStringCompression) {
1220 NearLabel modify_counter, offset_uncompressed_label;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001221 __ testl(string_length_flagged, Immediate(1));
1222 __ j(kNotZero, &offset_uncompressed_label);
jessicahandojo4877b792016-09-08 19:49:13 -07001223 // Move to the start of the string: string_obj + value_offset + start_index.
1224 __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_1, value_offset));
1225 __ jmp(&modify_counter);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001226
jessicahandojo4877b792016-09-08 19:49:13 -07001227 // Move to the start of the string: string_obj + value_offset + 2 * start_index.
1228 __ Bind(&offset_uncompressed_label);
1229 __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));
1230
1231 // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
1232 // compare.
1233 __ Bind(&modify_counter);
1234 } else {
1235 __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));
1236 }
Andreas Gampe21030dd2015-05-07 14:46:15 -07001237 __ negl(counter);
1238 __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
1239 }
1240
jessicahandojo4877b792016-09-08 19:49:13 -07001241 if (mirror::kUseStringCompression) {
1242 NearLabel uncompressed_string_comparison;
1243 NearLabel comparison_done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001244 __ testl(string_length_flagged, Immediate(1));
1245 __ j(kNotZero, &uncompressed_string_comparison);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001246
jessicahandojo4877b792016-09-08 19:49:13 -07001247 // Check if EAX (search_value) is ASCII.
1248 __ cmpl(search_value, Immediate(127));
1249 __ j(kGreater, &not_found_label);
1250 // Comparing byte-per-byte.
1251 __ repne_scasb();
1252 __ jmp(&comparison_done);
1253
1254 // Everything is set up for repne scasw:
1255 // * Comparison address in EDI.
1256 // * Counter in ECX.
1257 __ Bind(&uncompressed_string_comparison);
1258 __ repne_scasw();
1259 __ Bind(&comparison_done);
1260 } else {
1261 __ repne_scasw();
1262 }
Andreas Gampe21030dd2015-05-07 14:46:15 -07001263 // Did we find a match?
1264 __ j(kNotEqual, &not_found_label);
1265
1266 // Yes, we matched. Compute the index of the result.
1267 __ subl(string_length, counter);
1268 __ leal(out, Address(string_length, -1));
1269
Mark Mendell0c9497d2015-08-21 09:30:05 -04001270 NearLabel done;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001271 __ jmp(&done);
1272
1273 // Failed to match; return -1.
1274 __ Bind(&not_found_label);
1275 __ movl(out, Immediate(-1));
1276
1277 // And join up at the end.
1278 __ Bind(&done);
1279 if (slow_path != nullptr) {
1280 __ Bind(slow_path->GetExitLabel());
1281 }
1282}
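// A note on the index computation above (a sketch, relying on the repne scas
// counting behaviour): the counter starts at the number of elements to scan
// (string_length - start_index) and is decremented once per element scanned,
// including the matching one, so after a hit
//
//   elements_scanned = (string_length - start_index) - counter
//   match_index      = start_index + elements_scanned - 1
//                    = string_length - counter - 1
//
// which is exactly what subl(string_length, counter) followed by
// leal(out, Address(string_length, -1)) computes.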
1283
1284void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001285 CreateStringIndexOfLocations(invoke, allocator_, /* start_at_zero= */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001286}
1287
1288void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001289 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero= */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001290}
1291
1292void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001293 CreateStringIndexOfLocations(invoke, allocator_, /* start_at_zero= */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001294}
1295
1296void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001297 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero= */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001298}
1299
Jeff Hao848f70a2014-01-15 13:49:50 -08001300void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001301 LocationSummary* locations = new (allocator_) LocationSummary(
1302 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Jeff Hao848f70a2014-01-15 13:49:50 -08001303 InvokeRuntimeCallingConvention calling_convention;
1304 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1305 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1306 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1307 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1308 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001309}
1310
1311void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1312 X86Assembler* assembler = GetAssembler();
1313 LocationSummary* locations = invoke->GetLocations();
1314
1315 Register byte_array = locations->InAt(0).AsRegister<Register>();
1316 __ testl(byte_array, byte_array);
Vladimir Marko174b2e22017-10-12 13:34:49 +01001317 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001318 codegen_->AddSlowPath(slow_path);
1319 __ j(kEqual, slow_path->GetEntryLabel());
1320
Serban Constantinescuba45db02016-07-12 22:53:02 +01001321 codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001322 CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001323 __ Bind(slow_path->GetExitLabel());
1324}
1325
1326void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001327 LocationSummary* locations =
1328 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Jeff Hao848f70a2014-01-15 13:49:50 -08001329 InvokeRuntimeCallingConvention calling_convention;
1330 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1331 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1332 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1333 locations->SetOut(Location::RegisterLocation(EAX));
1334}
1335
1336void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
Roland Levillaincc3839c2016-02-29 16:23:48 +00001337 // No need to emit code checking whether `locations->InAt(2)` is a null
1338 // pointer, as callers of the native method
1339 //
1340 // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1341 //
1342 // all include a null check on `data` before calling that method.
Serban Constantinescuba45db02016-07-12 22:53:02 +01001343 codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001344 CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001345}
1346
1347void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001348 LocationSummary* locations = new (allocator_) LocationSummary(
1349 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Jeff Hao848f70a2014-01-15 13:49:50 -08001350 InvokeRuntimeCallingConvention calling_convention;
1351 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1352 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001353}
1354
1355void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
1356 X86Assembler* assembler = GetAssembler();
1357 LocationSummary* locations = invoke->GetLocations();
1358
1359 Register string_to_copy = locations->InAt(0).AsRegister<Register>();
1360 __ testl(string_to_copy, string_to_copy);
Vladimir Marko174b2e22017-10-12 13:34:49 +01001361 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001362 codegen_->AddSlowPath(slow_path);
1363 __ j(kEqual, slow_path->GetEntryLabel());
1364
Serban Constantinescuba45db02016-07-12 22:53:02 +01001365 codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001366 CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001367 __ Bind(slow_path->GetExitLabel());
1368}
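// The three NewStringFrom* intrinsics above share one pattern: the arguments
// are pinned to the runtime calling convention registers, a null check on the
// source (where one is needed) branches to a slow path, and the allocation
// itself is delegated to the matching quick entrypoint
// (kQuickAllocStringFromBytes/Chars/String), whose result comes back in EAX.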
1369
Mark Mendell8f8926a2015-08-17 11:39:06 -04001370void IntrinsicLocationsBuilderX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1371 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001372 LocationSummary* locations =
1373 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Mark Mendell8f8926a2015-08-17 11:39:06 -04001374 locations->SetInAt(0, Location::RequiresRegister());
1375 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1376 // Place srcEnd in ECX to save a move below.
1377 locations->SetInAt(2, Location::RegisterLocation(ECX));
1378 locations->SetInAt(3, Location::RequiresRegister());
1379 locations->SetInAt(4, Location::RequiresRegister());
1380
1381 // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
1382 // We don't have enough registers to also grab ECX, so handle below.
1383 locations->AddTemp(Location::RegisterLocation(ESI));
1384 locations->AddTemp(Location::RegisterLocation(EDI));
1385}
1386
1387void IntrinsicCodeGeneratorX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1388 X86Assembler* assembler = GetAssembler();
1389 LocationSummary* locations = invoke->GetLocations();
1390
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001391 size_t char_component_size = DataType::Size(DataType::Type::kUint16);
Mark Mendell8f8926a2015-08-17 11:39:06 -04001392 // Location of data in char array buffer.
1393 const uint32_t data_offset = mirror::Array::DataOffset(char_component_size).Uint32Value();
1394 // Location of char array data in string.
1395 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1396
1397 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1398 Register obj = locations->InAt(0).AsRegister<Register>();
1399 Location srcBegin = locations->InAt(1);
1400 int srcBegin_value =
1401 srcBegin.IsConstant() ? srcBegin.GetConstant()->AsIntConstant()->GetValue() : 0;
1402 Register srcEnd = locations->InAt(2).AsRegister<Register>();
1403 Register dst = locations->InAt(3).AsRegister<Register>();
1404 Register dstBegin = locations->InAt(4).AsRegister<Register>();
1405
1406 // Check assumption that sizeof(Char) is 2 (used in scaling below).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001407 const size_t char_size = DataType::Size(DataType::Type::kUint16);
Mark Mendell8f8926a2015-08-17 11:39:06 -04001408 DCHECK_EQ(char_size, 2u);
1409
Mark Mendell8f8926a2015-08-17 11:39:06 -04001410 // Compute the number of chars (words) to move.
jessicahandojo4877b792016-09-08 19:49:13 -07001411 // Save ECX, since we don't know if it will be used later.
Mark Mendell8f8926a2015-08-17 11:39:06 -04001412 __ pushl(ECX);
1413 int stack_adjust = kX86WordSize;
1414 __ cfi().AdjustCFAOffset(stack_adjust);
1415 DCHECK_EQ(srcEnd, ECX);
1416 if (srcBegin.IsConstant()) {
jessicahandojo4877b792016-09-08 19:49:13 -07001417 __ subl(ECX, Immediate(srcBegin_value));
Mark Mendell8f8926a2015-08-17 11:39:06 -04001418 } else {
1419 DCHECK(srcBegin.IsRegister());
1420 __ subl(ECX, srcBegin.AsRegister<Register>());
1421 }
1422
jessicahandojo4877b792016-09-08 19:49:13 -07001423 NearLabel done;
1424 if (mirror::kUseStringCompression) {
1425 // Location of the count field within the String object.
1426 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001427 const size_t c_char_size = DataType::Size(DataType::Type::kInt8);
jessicahandojo4877b792016-09-08 19:49:13 -07001428 DCHECK_EQ(c_char_size, 1u);
1429 __ pushl(EAX);
1430 __ cfi().AdjustCFAOffset(stack_adjust);
1431
1432 NearLabel copy_loop, copy_uncompressed;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001433 __ testl(Address(obj, count_offset), Immediate(1));
1434 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1435 "Expecting 0=compressed, 1=uncompressed");
1436 __ j(kNotZero, &copy_uncompressed);
jessicahandojo4877b792016-09-08 19:49:13 -07001437 // Compute the address of the source string by adding the number of chars from
1438 // the source beginning to the value offset of a string.
1439 __ leal(ESI, CodeGeneratorX86::ArrayAddress(obj, srcBegin, TIMES_1, value_offset));
1440
1441 // Start the loop to copy String's value to Array of Char.
1442 __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));
1443 __ Bind(&copy_loop);
1444 __ jecxz(&done);
1445 // Use EAX temporary (convert byte from ESI to word).
1446 // TODO: Use LODSB/STOSW (not supported by X86Assembler) with AH initialized to 0.
1447 __ movzxb(EAX, Address(ESI, 0));
1448 __ movw(Address(EDI, 0), EAX);
1449 __ leal(EDI, Address(EDI, char_size));
1450 __ leal(ESI, Address(ESI, c_char_size));
1451 // TODO: Add support for LOOP to X86Assembler.
1452 __ subl(ECX, Immediate(1));
1453 __ jmp(&copy_loop);
1454 __ Bind(&copy_uncompressed);
1455 }
1456
1457 // Do the copy for uncompressed string.
1458 // Compute the address of the destination buffer.
1459 __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));
1460 __ leal(ESI, CodeGeneratorX86::ArrayAddress(obj, srcBegin, TIMES_2, value_offset));
Mark Mendell8f8926a2015-08-17 11:39:06 -04001461 __ rep_movsw();
1462
jessicahandojo4877b792016-09-08 19:49:13 -07001463 __ Bind(&done);
1464 if (mirror::kUseStringCompression) {
1465 // Restore EAX.
1466 __ popl(EAX);
1467 __ cfi().AdjustCFAOffset(-stack_adjust);
1468 }
1469 // Restore ECX.
Mark Mendell8f8926a2015-08-17 11:39:06 -04001470 __ popl(ECX);
1471 __ cfi().AdjustCFAOffset(-stack_adjust);
1472}
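// In rough pseudo-code, the copy above does the following (sketch only):
//
//   n = srcEnd - srcBegin;                          // number of chars, in ECX
//   if (string is compressed) {
//     for (i = 0; i < n; ++i)                       // byte-by-byte widening loop
//       dst[dstBegin + i] = (char) src_bytes[srcBegin + i];
//   } else {
//     copy n 16-bit chars with rep movsw;           // ESI -> EDI
//   }
//
// ECX (and EAX on the compressed path) is saved and restored around the copy
// because the intrinsic does not reserve those registers as temporaries.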
1473
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001474static void GenPeek(LocationSummary* locations, DataType::Type size, X86Assembler* assembler) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001475 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1476 Location out_loc = locations->Out();
1477 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1478 // to avoid a SIGBUS.
1479 switch (size) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001480 case DataType::Type::kInt8:
Mark Mendell09ed1a32015-03-25 08:30:06 -04001481 __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
1482 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001483 case DataType::Type::kInt16:
Mark Mendell09ed1a32015-03-25 08:30:06 -04001484 __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
1485 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001486 case DataType::Type::kInt32:
Mark Mendell09ed1a32015-03-25 08:30:06 -04001487 __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
1488 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001489 case DataType::Type::kInt64:
Mark Mendell09ed1a32015-03-25 08:30:06 -04001490 __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
1491 __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
1492 break;
1493 default:
1494 LOG(FATAL) << "Type not recognized for peek: " << size;
1495 UNREACHABLE();
1496 }
1497}
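// Note that only the low half of the 64-bit address argument is used above
// (AsRegisterPairLow): on 32-bit x86 the upper word of a Memory.peek*()
// address cannot be dereferenced anyway.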
1498
1499void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001500 CreateLongToIntLocations(allocator_, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001501}
1502
1503void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001504 GenPeek(invoke->GetLocations(), DataType::Type::kInt8, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04001505}
1506
1507void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001508 CreateLongToIntLocations(allocator_, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001509}
1510
1511void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001512 GenPeek(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04001513}
1514
1515void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001516 CreateLongToLongLocations(allocator_, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001517}
1518
1519void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001520 GenPeek(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04001521}
1522
1523void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001524 CreateLongToIntLocations(allocator_, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001525}
1526
1527void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001528 GenPeek(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04001529}
1530
Vladimir Markoca6fff82017-10-03 14:49:14 +01001531static void CreateLongIntToVoidLocations(ArenaAllocator* allocator,
1532 DataType::Type size,
Mark Mendell09ed1a32015-03-25 08:30:06 -04001533 HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001534 LocationSummary* locations =
1535 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001536 locations->SetInAt(0, Location::RequiresRegister());
Roland Levillain4c0eb422015-04-24 16:43:49 +01001537 HInstruction* value = invoke->InputAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001538 if (size == DataType::Type::kInt8) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001539 locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
1540 } else {
1541 locations->SetInAt(1, Location::RegisterOrConstant(value));
1542 }
1543}
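// The byte case asks for Location::ByteRegisterOrConstant because an 8-bit
// store (movb) can only take AL/BL/CL/DL as its register source on x86-32;
// a value sitting in, say, ESI or EDI would not be encodable.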
1544
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001545static void GenPoke(LocationSummary* locations, DataType::Type size, X86Assembler* assembler) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001546 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1547 Location value_loc = locations->InAt(1);
1548 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1549 // to avoid a SIGBUS.
1550 switch (size) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001551 case DataType::Type::kInt8:
Mark Mendell09ed1a32015-03-25 08:30:06 -04001552 if (value_loc.IsConstant()) {
1553 __ movb(Address(address, 0),
1554 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1555 } else {
1556 __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
1557 }
1558 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001559 case DataType::Type::kInt16:
Mark Mendell09ed1a32015-03-25 08:30:06 -04001560 if (value_loc.IsConstant()) {
1561 __ movw(Address(address, 0),
1562 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1563 } else {
1564 __ movw(Address(address, 0), value_loc.AsRegister<Register>());
1565 }
1566 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001567 case DataType::Type::kInt32:
Mark Mendell09ed1a32015-03-25 08:30:06 -04001568 if (value_loc.IsConstant()) {
1569 __ movl(Address(address, 0),
1570 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1571 } else {
1572 __ movl(Address(address, 0), value_loc.AsRegister<Register>());
1573 }
1574 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001575 case DataType::Type::kInt64:
Mark Mendell09ed1a32015-03-25 08:30:06 -04001576 if (value_loc.IsConstant()) {
1577 int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
1578 __ movl(Address(address, 0), Immediate(Low32Bits(value)));
1579 __ movl(Address(address, 4), Immediate(High32Bits(value)));
1580 } else {
1581 __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
1582 __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
1583 }
1584 break;
1585 default:
1586 LOG(FATAL) << "Type not recognized for poke: " << size;
1587 UNREACHABLE();
1588 }
1589}
1590
1591void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001592 CreateLongIntToVoidLocations(allocator_, DataType::Type::kInt8, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001593}
1594
1595void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001596 GenPoke(invoke->GetLocations(), DataType::Type::kInt8, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04001597}
1598
1599void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001600 CreateLongIntToVoidLocations(allocator_, DataType::Type::kInt32, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001601}
1602
1603void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001604 GenPoke(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04001605}
1606
1607void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001608 CreateLongIntToVoidLocations(allocator_, DataType::Type::kInt64, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001609}
1610
1611void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001612 GenPoke(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04001613}
1614
1615void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001616 CreateLongIntToVoidLocations(allocator_, DataType::Type::kInt16, invoke);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001617}
1618
1619void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001620 GenPoke(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04001621}
1622
1623void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001624 LocationSummary* locations =
1625 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001626 locations->SetOut(Location::RequiresRegister());
1627}
1628
1629void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
1630 Register out = invoke->GetLocations()->Out().AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07001631 GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86PointerSize>()));
Mark Mendell09ed1a32015-03-25 08:30:06 -04001632}
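// Thread.currentThread() thus reduces to a single segment-relative load: the
// managed peer object is cached in the native Thread structure, which the
// runtime addresses through the fs segment register on x86, at PeerOffset.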
1633
Roland Levillain0d5a2812015-11-13 10:07:31 +00001634static void GenUnsafeGet(HInvoke* invoke,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001635 DataType::Type type,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001636 bool is_volatile,
1637 CodeGeneratorX86* codegen) {
1638 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
1639 LocationSummary* locations = invoke->GetLocations();
1640 Location base_loc = locations->InAt(1);
1641 Register base = base_loc.AsRegister<Register>();
1642 Location offset_loc = locations->InAt(2);
1643 Register offset = offset_loc.AsRegisterPairLow<Register>();
1644 Location output_loc = locations->Out();
Mark Mendell09ed1a32015-03-25 08:30:06 -04001645
1646 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001647 case DataType::Type::kInt32: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001648 Register output = output_loc.AsRegister<Register>();
1649 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
Roland Levillain7c1559a2015-12-15 10:55:36 +00001650 break;
1651 }
1652
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001653 case DataType::Type::kReference: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00001654 Register output = output_loc.AsRegister<Register>();
1655 if (kEmitCompilerReadBarrier) {
1656 if (kUseBakerReadBarrier) {
Sang, Chunlei0fcd2b82016-04-05 17:12:59 +08001657 Address src(base, offset, ScaleFactor::TIMES_1, 0);
1658 codegen->GenerateReferenceLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08001659 invoke, output_loc, base, src, /* needs_null_check= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00001660 } else {
1661 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
1662 codegen->GenerateReadBarrierSlow(
1663 invoke, output_loc, output_loc, base_loc, 0U, offset_loc);
1664 }
1665 } else {
1666 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
1667 __ MaybeUnpoisonHeapReference(output);
Roland Levillain4d027112015-07-01 15:41:14 +01001668 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001669 break;
Roland Levillain4d027112015-07-01 15:41:14 +01001670 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001671
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001672 case DataType::Type::kInt64: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001673 Register output_lo = output_loc.AsRegisterPairLow<Register>();
1674 Register output_hi = output_loc.AsRegisterPairHigh<Register>();
Mark Mendell09ed1a32015-03-25 08:30:06 -04001675 if (is_volatile) {
1676 // Need to use a XMM to read atomically.
1677 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1678 __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
1679 __ movd(output_lo, temp);
1680 __ psrlq(temp, Immediate(32));
1681 __ movd(output_hi, temp);
1682 } else {
1683 __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
1684 __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
1685 }
1686 }
1687 break;
1688
1689 default:
1690 LOG(FATAL) << "Unsupported op size " << type;
1691 UNREACHABLE();
1692 }
1693}
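// For the volatile 64-bit case above, two separate 32-bit loads would not be
// atomic, so the value is fetched with a single 8-byte movsd into an XMM
// register and then split into the output register pair via movd/psrlq.
// Reference loads go through the read barrier machinery when it is enabled,
// and are otherwise plain loads followed by heap-reference unpoisoning.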
1694
Vladimir Markoca6fff82017-10-03 14:49:14 +01001695static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
Roland Levillain7c1559a2015-12-15 10:55:36 +00001696 HInvoke* invoke,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001697 DataType::Type type,
Roland Levillain7c1559a2015-12-15 10:55:36 +00001698 bool is_volatile) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001699 bool can_call = kEmitCompilerReadBarrier &&
1700 (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
1701 invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001702 LocationSummary* locations =
1703 new (allocator) LocationSummary(invoke,
1704 can_call
1705 ? LocationSummary::kCallOnSlowPath
1706 : LocationSummary::kNoCall,
1707 kIntrinsified);
Vladimir Marko70e97462016-08-09 11:04:26 +01001708 if (can_call && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001709 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01001710 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001711 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1712 locations->SetInAt(1, Location::RequiresRegister());
1713 locations->SetInAt(2, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001714 if (type == DataType::Type::kInt64) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001715 if (is_volatile) {
1716 // Need to use XMM to read volatile.
1717 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain3d312422016-06-23 13:53:42 +01001718 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001719 } else {
1720 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1721 }
1722 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001723 locations->SetOut(Location::RequiresRegister(),
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001724 (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
Mark Mendell09ed1a32015-03-25 08:30:06 -04001725 }
1726}
1727
1728void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001729 CreateIntIntIntToIntLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001730 allocator_, invoke, DataType::Type::kInt32, /* is_volatile= */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001731}
1732void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001733 CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32, /* is_volatile= */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001734}
1735void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001736 CreateIntIntIntToIntLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001737 allocator_, invoke, DataType::Type::kInt64, /* is_volatile= */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001738}
1739void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001740 CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64, /* is_volatile= */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001741}
1742void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001743 CreateIntIntIntToIntLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001744 allocator_, invoke, DataType::Type::kReference, /* is_volatile= */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001745}
1746void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001747 CreateIntIntIntToIntLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001748 allocator_, invoke, DataType::Type::kReference, /* is_volatile= */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001749}
1750
1751
1752void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001753 GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile= */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001754}
1755void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001756 GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile= */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001757}
1758void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001759 GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile= */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001760}
1761void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001762 GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile= */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001763}
1764void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001765 GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile= */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001766}
1767void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001768 GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile= */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001769}
1770
1771
Vladimir Markoca6fff82017-10-03 14:49:14 +01001772static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001773 DataType::Type type,
Mark Mendell09ed1a32015-03-25 08:30:06 -04001774 HInvoke* invoke,
1775 bool is_volatile) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001776 LocationSummary* locations =
1777 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001778 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1779 locations->SetInAt(1, Location::RequiresRegister());
1780 locations->SetInAt(2, Location::RequiresRegister());
1781 locations->SetInAt(3, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001782 if (type == DataType::Type::kReference) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001783 // Need temp registers for card-marking.
Roland Levillain4d027112015-07-01 15:41:14 +01001784 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell09ed1a32015-03-25 08:30:06 -04001785 // Ensure the value is in a byte register.
1786 locations->AddTemp(Location::RegisterLocation(ECX));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001787 } else if (type == DataType::Type::kInt64 && is_volatile) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001788 locations->AddTemp(Location::RequiresFpuRegister());
1789 locations->AddTemp(Location::RequiresFpuRegister());
1790 }
1791}
1792
1793void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001794 CreateIntIntIntIntToVoidPlusTempsLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001795 allocator_, DataType::Type::kInt32, invoke, /* is_volatile= */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001796}
1797void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001798 CreateIntIntIntIntToVoidPlusTempsLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001799 allocator_, DataType::Type::kInt32, invoke, /* is_volatile= */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001800}
1801void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001802 CreateIntIntIntIntToVoidPlusTempsLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001803 allocator_, DataType::Type::kInt32, invoke, /* is_volatile= */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001804}
1805void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001806 CreateIntIntIntIntToVoidPlusTempsLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001807 allocator_, DataType::Type::kReference, invoke, /* is_volatile= */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001808}
1809void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001810 CreateIntIntIntIntToVoidPlusTempsLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001811 allocator_, DataType::Type::kReference, invoke, /* is_volatile= */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001812}
1813void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001814 CreateIntIntIntIntToVoidPlusTempsLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001815 allocator_, DataType::Type::kReference, invoke, /* is_volatile= */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001816}
1817void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001818 CreateIntIntIntIntToVoidPlusTempsLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001819 allocator_, DataType::Type::kInt64, invoke, /* is_volatile= */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001820}
1821void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001822 CreateIntIntIntIntToVoidPlusTempsLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001823 allocator_, DataType::Type::kInt64, invoke, /* is_volatile= */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001824}
1825void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001826 CreateIntIntIntIntToVoidPlusTempsLocations(
Andreas Gampe3db70682018-12-26 15:12:03 -08001827 allocator_, DataType::Type::kInt64, invoke, /* is_volatile= */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001828}
1829
1830// We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
1831// memory model.
1832static void GenUnsafePut(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001833 DataType::Type type,
Mark Mendell09ed1a32015-03-25 08:30:06 -04001834 bool is_volatile,
1835 CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01001836 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04001837 Register base = locations->InAt(1).AsRegister<Register>();
1838 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
1839 Location value_loc = locations->InAt(3);
1840
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001841 if (type == DataType::Type::kInt64) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001842 Register value_lo = value_loc.AsRegisterPairLow<Register>();
1843 Register value_hi = value_loc.AsRegisterPairHigh<Register>();
1844 if (is_volatile) {
1845 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1846 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
1847 __ movd(temp1, value_lo);
1848 __ movd(temp2, value_hi);
1849 __ punpckldq(temp1, temp2);
1850 __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
1851 } else {
1852 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
1853 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
1854 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001855 } else if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01001856 Register temp = locations->GetTemp(0).AsRegister<Register>();
1857 __ movl(temp, value_loc.AsRegister<Register>());
1858 __ PoisonHeapReference(temp);
1859 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001860 } else {
1861 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
1862 }
1863
1864 if (is_volatile) {
Mark P Mendell17077d82015-12-16 19:15:59 +00001865 codegen->MemoryFence();
Mark Mendell09ed1a32015-03-25 08:30:06 -04001866 }
1867
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001868 if (type == DataType::Type::kReference) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001869 bool value_can_be_null = true; // TODO: Worth finding out this information?
Mark Mendell09ed1a32015-03-25 08:30:06 -04001870 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
1871 locations->GetTemp(1).AsRegister<Register>(),
1872 base,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001873 value_loc.AsRegister<Register>(),
1874 value_can_be_null);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001875 }
1876}
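// Summary of the stores above: 64-bit volatile values are assembled into one
// XMM register (punpckldq) and written with a single 8-byte movsd so the
// store is atomic; reference values are poisoned first when heap poisoning is
// enabled and are followed by a GC card mark; and every volatile store ends
// with codegen->MemoryFence() to supply the StoreLoad ordering that volatile
// semantics require but plain x86 stores do not give.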
1877
1878void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001879 GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /* is_volatile= */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001880}
1881void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001882 GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /* is_volatile= */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001883}
1884void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001885 GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /* is_volatile= */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001886}
1887void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001888 GenUnsafePut(
Andreas Gampe3db70682018-12-26 15:12:03 -08001889 invoke->GetLocations(), DataType::Type::kReference, /* is_volatile= */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001890}
1891void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001892 GenUnsafePut(
Andreas Gampe3db70682018-12-26 15:12:03 -08001893 invoke->GetLocations(), DataType::Type::kReference, /* is_volatile= */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001894}
1895void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001896 GenUnsafePut(
Andreas Gampe3db70682018-12-26 15:12:03 -08001897 invoke->GetLocations(), DataType::Type::kReference, /* is_volatile= */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001898}
1899void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001900 GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt64, /* is_volatile= */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001901}
1902void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001903 GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt64, /* is_volatile= */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001904}
1905void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001906 GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt64, /* is_volatile= */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001907}
1908
Vladimir Markoca6fff82017-10-03 14:49:14 +01001909static void CreateIntIntIntIntIntToInt(ArenaAllocator* allocator,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001910 DataType::Type type,
Mark Mendell58d25fd2015-04-03 14:52:31 -04001911 HInvoke* invoke) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001912 bool can_call = kEmitCompilerReadBarrier &&
1913 kUseBakerReadBarrier &&
1914 (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001915 LocationSummary* locations =
1916 new (allocator) LocationSummary(invoke,
1917 can_call
1918 ? LocationSummary::kCallOnSlowPath
1919 : LocationSummary::kNoCall,
1920 kIntrinsified);
Mark Mendell58d25fd2015-04-03 14:52:31 -04001921 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1922 locations->SetInAt(1, Location::RequiresRegister());
1923 // Offset is a long, but in 32-bit mode we only need the low word.
1924 // Can we update the invoke here to remove a TypeConvert to Long?
1925 locations->SetInAt(2, Location::RequiresRegister());
1926 // Expected value must be in EAX or EDX:EAX.
1927 // For long, new value must be in ECX:EBX.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001928 if (type == DataType::Type::kInt64) {
Mark Mendell58d25fd2015-04-03 14:52:31 -04001929 locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
1930 locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
1931 } else {
1932 locations->SetInAt(3, Location::RegisterLocation(EAX));
1933 locations->SetInAt(4, Location::RequiresRegister());
1934 }
1935
1936 // Force a byte register for the output.
1937 locations->SetOut(Location::RegisterLocation(EAX));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001938 if (type == DataType::Type::kReference) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001939 // Need temporary registers for card-marking, and possibly for
1940 // (Baker) read barrier.
Roland Levillainb488b782015-10-22 11:38:49 +01001941 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell58d25fd2015-04-03 14:52:31 -04001942 // Need a byte register for marking.
1943 locations->AddTemp(Location::RegisterLocation(ECX));
1944 }
1945}
1946
1947void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001948 CreateIntIntIntIntIntToInt(allocator_, DataType::Type::kInt32, invoke);
Mark Mendell58d25fd2015-04-03 14:52:31 -04001949}
1950
1951void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001952 CreateIntIntIntIntIntToInt(allocator_, DataType::Type::kInt64, invoke);
Mark Mendell58d25fd2015-04-03 14:52:31 -04001953}
1954
1955void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001956 // The only read barrier implementation supporting the
1957 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1958 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
Roland Levillain391b8662015-12-18 11:43:38 +00001959 return;
1960 }
1961
Vladimir Markoca6fff82017-10-03 14:49:14 +01001962 CreateIntIntIntIntIntToInt(allocator_, DataType::Type::kReference, invoke);
Mark Mendell58d25fd2015-04-03 14:52:31 -04001963}
1964
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001965static void GenCAS(DataType::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01001966 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell58d25fd2015-04-03 14:52:31 -04001967 LocationSummary* locations = invoke->GetLocations();
1968
1969 Register base = locations->InAt(1).AsRegister<Register>();
1970 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
1971 Location out = locations->Out();
1972 DCHECK_EQ(out.AsRegister<Register>(), EAX);
1973
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001974 // The address of the field within the holding object.
1975 Address field_addr(base, offset, ScaleFactor::TIMES_1, 0);
1976
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001977 if (type == DataType::Type::kReference) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001978 // The only read barrier implementation supporting the
1979 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1980 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1981
1982 Location temp1_loc = locations->GetTemp(0);
1983 Register temp1 = temp1_loc.AsRegister<Register>();
1984 Register temp2 = locations->GetTemp(1).AsRegister<Register>();
1985
Roland Levillain4d027112015-07-01 15:41:14 +01001986 Register expected = locations->InAt(3).AsRegister<Register>();
Roland Levillainb488b782015-10-22 11:38:49 +01001987 // Ensure `expected` is in EAX (required by the CMPXCHG instruction).
Roland Levillain4d027112015-07-01 15:41:14 +01001988 DCHECK_EQ(expected, EAX);
Mark Mendell58d25fd2015-04-03 14:52:31 -04001989 Register value = locations->InAt(4).AsRegister<Register>();
Roland Levillain4d027112015-07-01 15:41:14 +01001990
Roland Levillainb488b782015-10-22 11:38:49 +01001991 // Mark card for object assuming new value is stored.
1992 bool value_can_be_null = true; // TODO: Worth finding out this information?
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001993 codegen->MarkGCCard(temp1, temp2, base, value, value_can_be_null);
1994
1995 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1996 // Need to make sure the reference stored in the field is a to-space
1997 // one before attempting the CAS or the CAS could fail incorrectly.
1998 codegen->GenerateReferenceLoadWithBakerReadBarrier(
1999 invoke,
2000 temp1_loc, // Unused, used only as a "temporary" within the read barrier.
2001 base,
2002 field_addr,
Andreas Gampe3db70682018-12-26 15:12:03 -08002003 /* needs_null_check= */ false,
2004 /* always_update_field= */ true,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002005 &temp2);
2006 }
Roland Levillainb488b782015-10-22 11:38:49 +01002007
2008 bool base_equals_value = (base == value);
2009 if (kPoisonHeapReferences) {
2010 if (base_equals_value) {
2011 // If `base` and `value` are the same register location, move
2012 // `value` to a temporary register. This way, poisoning
2013 // `value` won't invalidate `base`.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002014 value = temp1;
Roland Levillainb488b782015-10-22 11:38:49 +01002015 __ movl(value, base);
Roland Levillain4d027112015-07-01 15:41:14 +01002016 }
Roland Levillainb488b782015-10-22 11:38:49 +01002017
2018 // Check that the register allocator did not assign the location
2019 // of `expected` (EAX) to `value` nor to `base`, so that heap
2020 // poisoning (when enabled) works as intended below.
2021 // - If `value` were equal to `expected`, both references would
2022 // be poisoned twice, meaning they would not be poisoned at
2023 // all, as heap poisoning uses address negation.
2024 // - If `base` were equal to `expected`, poisoning `expected`
2025 // would invalidate `base`.
2026 DCHECK_NE(value, expected);
2027 DCHECK_NE(base, expected);
2028
2029 __ PoisonHeapReference(expected);
2030 __ PoisonHeapReference(value);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002031 }
2032
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002033 __ LockCmpxchgl(field_addr, value);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002034
Roland Levillain0d5a2812015-11-13 10:07:31 +00002035 // LOCK CMPXCHG has full barrier semantics, and we don't need
Roland Levillainb488b782015-10-22 11:38:49 +01002036 // scheduling barriers at this time.
Mark Mendell58d25fd2015-04-03 14:52:31 -04002037
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002038 // Convert ZF into the Boolean result.
Roland Levillainb488b782015-10-22 11:38:49 +01002039 __ setb(kZero, out.AsRegister<Register>());
2040 __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
Roland Levillain4d027112015-07-01 15:41:14 +01002041
Roland Levillain391b8662015-12-18 11:43:38 +00002042 // If heap poisoning is enabled, we need to unpoison the values
2043 // that were poisoned earlier.
Roland Levillainb488b782015-10-22 11:38:49 +01002044 if (kPoisonHeapReferences) {
2045 if (base_equals_value) {
2046 // `value` has been moved to a temporary register, no need to
2047 // unpoison it.
2048 } else {
2049 // Ensure `value` is different from `out`, so that unpoisoning
2050 // the former does not invalidate the latter.
2051 DCHECK_NE(value, out.AsRegister<Register>());
2052 __ UnpoisonHeapReference(value);
2053 }
2054 // Do not unpoison the reference contained in register
2055 // `expected`, as it is the same as register `out` (EAX).
2056 }
2057 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002058 if (type == DataType::Type::kInt32) {
Roland Levillainb488b782015-10-22 11:38:49 +01002059 // Ensure the expected value is in EAX (required by the CMPXCHG
2060 // instruction).
2061 DCHECK_EQ(locations->InAt(3).AsRegister<Register>(), EAX);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002062 __ LockCmpxchgl(field_addr, locations->InAt(4).AsRegister<Register>());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002063 } else if (type == DataType::Type::kInt64) {
Roland Levillainb488b782015-10-22 11:38:49 +01002064 // Ensure the expected value is in EAX:EDX and that the new
2065 // value is in EBX:ECX (required by the CMPXCHG8B instruction).
2066 DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
2067 DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
2068 DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
2069 DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002070 __ LockCmpxchg8b(field_addr);
Roland Levillainb488b782015-10-22 11:38:49 +01002071 } else {
2072 LOG(FATAL) << "Unexpected CAS type " << type;
2073 }
2074
Roland Levillain0d5a2812015-11-13 10:07:31 +00002075 // LOCK CMPXCHG/LOCK CMPXCHG8B have full barrier semantics, and we
2076 // don't need scheduling barriers at this time.
Roland Levillainb488b782015-10-22 11:38:49 +01002077
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002078 // Convert ZF into the Boolean result.
Roland Levillainb488b782015-10-22 11:38:49 +01002079 __ setb(kZero, out.AsRegister<Register>());
2080 __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
Roland Levillain4d027112015-07-01 15:41:14 +01002081 }
Mark Mendell58d25fd2015-04-03 14:52:31 -04002082}
2083
2084void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002085 GenCAS(DataType::Type::kInt32, invoke, codegen_);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002086}
2087
2088void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002089 GenCAS(DataType::Type::kInt64, invoke, codegen_);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002090}
2091
2092void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002093 // The only read barrier implementation supporting the
2094 // UnsafeCASObject intrinsic is the Baker-style read barrier.
2095 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
Roland Levillain3d312422016-06-23 13:53:42 +01002096
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002097 GenCAS(DataType::Type::kReference, invoke, codegen_);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002098}
2099
2100void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002101 LocationSummary* locations =
2102 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002103 locations->SetInAt(0, Location::RequiresRegister());
2104 locations->SetOut(Location::SameAsFirstInput());
2105 locations->AddTemp(Location::RequiresRegister());
2106}
2107
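// SwapBits performs one round of the classic bit-reversal. It exchanges the bit groups selected
// by `mask` with the groups `shift` positions above them, computing
//   reg = ((reg & mask) << shift) | ((reg >> shift) & mask)
// e.g. shift == 1 with mask == 0x55555555 swaps every pair of adjacent bits.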
2108static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
2109 X86Assembler* assembler) {
2110 Immediate imm_shift(shift);
2111 Immediate imm_mask(mask);
2112 __ movl(temp, reg);
2113 __ shrl(reg, imm_shift);
2114 __ andl(temp, imm_mask);
2115 __ andl(reg, imm_mask);
2116 __ shll(temp, imm_shift);
2117 __ orl(reg, temp);
2118}
2119
2120void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002121 X86Assembler* assembler = GetAssembler();
Mark Mendell58d25fd2015-04-03 14:52:31 -04002122 LocationSummary* locations = invoke->GetLocations();
2123
2124 Register reg = locations->InAt(0).AsRegister<Register>();
2125 Register temp = locations->GetTemp(0).AsRegister<Register>();
2126
2127 /*
2128 * Use one bswap instruction to reverse byte order first and then use 3 rounds of
2129 * swapping bits to reverse the bits in a number x. Using bswap saves instructions
2130 * compared to the generic luni implementation, which needs 5 rounds of bit swapping.
2131 * x = bswap x
2132 * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
2133 * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
2134 * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
2135 */
2136 __ bswapl(reg);
2137 SwapBits(reg, temp, 1, 0x55555555, assembler);
2138 SwapBits(reg, temp, 2, 0x33333333, assembler);
2139 SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
2140}
2141
2142void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002143 LocationSummary* locations =
2144 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002145 locations->SetInAt(0, Location::RequiresRegister());
2146 locations->SetOut(Location::SameAsFirstInput());
2147 locations->AddTemp(Location::RequiresRegister());
2148}
2149
2150void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002151 X86Assembler* assembler = GetAssembler();
Mark Mendell58d25fd2015-04-03 14:52:31 -04002152 LocationSummary* locations = invoke->GetLocations();
2153
2154 Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
2155 Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
2156 Register temp = locations->GetTemp(0).AsRegister<Register>();
2157
2158 // We want to swap high/low, then bswap each one, and then do the same
2159 // as a 32-bit reverse.
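  // Conceptually, reverse64(hi:lo) == reverse32(lo):reverse32(hi), so exchanging the halves and
  // then bit-reversing each 32-bit half in place yields the 64-bit reversal.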
2160 // Exchange high and low.
2161 __ movl(temp, reg_low);
2162 __ movl(reg_low, reg_high);
2163 __ movl(reg_high, temp);
2164
2165 // bit-reverse low
2166 __ bswapl(reg_low);
2167 SwapBits(reg_low, temp, 1, 0x55555555, assembler);
2168 SwapBits(reg_low, temp, 2, 0x33333333, assembler);
2169 SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);
2170
2171 // bit-reverse high
2172 __ bswapl(reg_high);
2173 SwapBits(reg_high, temp, 1, 0x55555555, assembler);
2174 SwapBits(reg_high, temp, 2, 0x33333333, assembler);
2175 SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
2176}
2177
Aart Bikc39dac12016-01-21 08:59:48 -08002178static void CreateBitCountLocations(
Vladimir Markoca6fff82017-10-03 14:49:14 +01002179 ArenaAllocator* allocator, CodeGeneratorX86* codegen, HInvoke* invoke, bool is_long) {
Aart Bikc39dac12016-01-21 08:59:48 -08002180 if (!codegen->GetInstructionSetFeatures().HasPopCnt()) {
2181 // Do nothing if there is no popcnt support. This results in generating
2182 // a call for the intrinsic rather than direct code.
2183 return;
2184 }
Vladimir Markoca6fff82017-10-03 14:49:14 +01002185 LocationSummary* locations =
2186 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Aart Bikc39dac12016-01-21 08:59:48 -08002187 if (is_long) {
Aart Bikc39dac12016-01-21 08:59:48 -08002188 locations->AddTemp(Location::RequiresRegister());
Aart Bikc39dac12016-01-21 08:59:48 -08002189 }
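  // POPCNT accepts a memory operand, so the input may stay in a register or on the stack.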
Aart Bik2a946072016-01-21 12:49:00 -08002190 locations->SetInAt(0, Location::Any());
Aart Bikc39dac12016-01-21 08:59:48 -08002191 locations->SetOut(Location::RequiresRegister());
2192}
2193
Aart Bika19616e2016-02-01 18:57:58 -08002194static void GenBitCount(X86Assembler* assembler,
2195 CodeGeneratorX86* codegen,
2196 HInvoke* invoke, bool is_long) {
Aart Bikc39dac12016-01-21 08:59:48 -08002197 LocationSummary* locations = invoke->GetLocations();
2198 Location src = locations->InAt(0);
2199 Register out = locations->Out().AsRegister<Register>();
2200
2201 if (invoke->InputAt(0)->IsConstant()) {
2202 // Evaluate this at compile time.
2203 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
Roland Levillainfa3912e2016-04-01 18:21:55 +01002204 int32_t result = is_long
Aart Bikc39dac12016-01-21 08:59:48 -08002205 ? POPCOUNT(static_cast<uint64_t>(value))
2206 : POPCOUNT(static_cast<uint32_t>(value));
Roland Levillainfa3912e2016-04-01 18:21:55 +01002207 codegen->Load32BitValue(out, result);
Aart Bikc39dac12016-01-21 08:59:48 -08002208 return;
2209 }
2210
2211 // Handle the non-constant cases.
2212 if (!is_long) {
2213 if (src.IsRegister()) {
2214 __ popcntl(out, src.AsRegister<Register>());
2215 } else {
2216 DCHECK(src.IsStackSlot());
2217 __ popcntl(out, Address(ESP, src.GetStackIndex()));
2218 }
Aart Bik2a946072016-01-21 12:49:00 -08002219 } else {
2220 // The 64-bit case needs to worry about two parts.
2221 Register temp = locations->GetTemp(0).AsRegister<Register>();
2222 if (src.IsRegisterPair()) {
2223 __ popcntl(temp, src.AsRegisterPairLow<Register>());
2224 __ popcntl(out, src.AsRegisterPairHigh<Register>());
2225 } else {
2226 DCHECK(src.IsDoubleStackSlot());
2227 __ popcntl(temp, Address(ESP, src.GetStackIndex()));
2228 __ popcntl(out, Address(ESP, src.GetHighStackIndex(kX86WordSize)));
2229 }
2230 __ addl(out, temp);
Aart Bikc39dac12016-01-21 08:59:48 -08002231 }
Aart Bikc39dac12016-01-21 08:59:48 -08002232}
2233
2234void IntrinsicLocationsBuilderX86::VisitIntegerBitCount(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002235 CreateBitCountLocations(allocator_, codegen_, invoke, /* is_long= */ false);
Aart Bikc39dac12016-01-21 08:59:48 -08002236}
2237
2238void IntrinsicCodeGeneratorX86::VisitIntegerBitCount(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002239 GenBitCount(GetAssembler(), codegen_, invoke, /* is_long= */ false);
Aart Bikc39dac12016-01-21 08:59:48 -08002240}
2241
2242void IntrinsicLocationsBuilderX86::VisitLongBitCount(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002243 CreateBitCountLocations(allocator_, codegen_, invoke, /* is_long= */ true);
Aart Bikc39dac12016-01-21 08:59:48 -08002244}
2245
2246void IntrinsicCodeGeneratorX86::VisitLongBitCount(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002247 GenBitCount(GetAssembler(), codegen_, invoke, /* is_long= */ true);
Aart Bikc39dac12016-01-21 08:59:48 -08002248}
2249
Vladimir Markoca6fff82017-10-03 14:49:14 +01002250static void CreateLeadingZeroLocations(ArenaAllocator* allocator, HInvoke* invoke, bool is_long) {
2251 LocationSummary* locations =
2252 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Mark Mendelld5897672015-08-12 21:16:41 -04002253 if (is_long) {
2254 locations->SetInAt(0, Location::RequiresRegister());
2255 } else {
2256 locations->SetInAt(0, Location::Any());
2257 }
2258 locations->SetOut(Location::RequiresRegister());
2259}
2260
Aart Bika19616e2016-02-01 18:57:58 -08002261static void GenLeadingZeros(X86Assembler* assembler,
2262 CodeGeneratorX86* codegen,
2263 HInvoke* invoke, bool is_long) {
Mark Mendelld5897672015-08-12 21:16:41 -04002264 LocationSummary* locations = invoke->GetLocations();
2265 Location src = locations->InAt(0);
2266 Register out = locations->Out().AsRegister<Register>();
2267
2268 if (invoke->InputAt(0)->IsConstant()) {
2269 // Evaluate this at compile time.
2270 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
2271 if (value == 0) {
2272 value = is_long ? 64 : 32;
2273 } else {
2274 value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
2275 }
Aart Bika19616e2016-02-01 18:57:58 -08002276 codegen->Load32BitValue(out, value);
Mark Mendelld5897672015-08-12 21:16:41 -04002277 return;
2278 }
2279
2280 // Handle the non-constant cases.
2281 if (!is_long) {
2282 if (src.IsRegister()) {
2283 __ bsrl(out, src.AsRegister<Register>());
2284 } else {
2285 DCHECK(src.IsStackSlot());
2286 __ bsrl(out, Address(ESP, src.GetStackIndex()));
2287 }
2288
2289 // BSR sets ZF if the input was zero, in which case the output is undefined.
Mark Mendell0c9497d2015-08-21 09:30:05 -04002290 NearLabel all_zeroes, done;
Mark Mendelld5897672015-08-12 21:16:41 -04002291 __ j(kEqual, &all_zeroes);
2292
2293 // Correct the result from BSR to get the final CLZ result.
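    // BSR yields the index of the highest set bit (0..31); CLZ == 31 - index, which for a
    // 5-bit index is the same as index XOR 31.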
2294 __ xorl(out, Immediate(31));
2295 __ jmp(&done);
2296
2297 // Fix the zero case with the expected result.
2298 __ Bind(&all_zeroes);
2299 __ movl(out, Immediate(32));
2300
2301 __ Bind(&done);
2302 return;
2303 }
2304
2305 // The 64-bit case needs to worry about both parts of the register pair.
2306 DCHECK(src.IsRegisterPair());
2307 Register src_lo = src.AsRegisterPairLow<Register>();
2308 Register src_hi = src.AsRegisterPairHigh<Register>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002309 NearLabel handle_low, done, all_zeroes;
Mark Mendelld5897672015-08-12 21:16:41 -04002310
2311 // Is the high word zero?
2312 __ testl(src_hi, src_hi);
2313 __ j(kEqual, &handle_low);
2314
2315 // High word is not zero. We know that the BSR result is defined in this case.
2316 __ bsrl(out, src_hi);
2317
2318 // Correct the result from BSR to get the final CLZ result.
2319 __ xorl(out, Immediate(31));
2320 __ jmp(&done);
2321
2322 // High word was zero. We have to compute the low word count and add 32.
2323 __ Bind(&handle_low);
2324 __ bsrl(out, src_lo);
2325 __ j(kEqual, &all_zeroes);
2326
2327 // We had a valid result. Use an XOR to both correct the result and add 32.
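  // With the high word zero, CLZ == 32 + (31 - index) == 63 - index, which equals
  // index XOR 63 for an index in 0..31.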
2328 __ xorl(out, Immediate(63));
2329 __ jmp(&done);
2330
2331 // All zero case.
2332 __ Bind(&all_zeroes);
2333 __ movl(out, Immediate(64));
2334
2335 __ Bind(&done);
2336}
2337
2338void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002339 CreateLeadingZeroLocations(allocator_, invoke, /* is_long= */ false);
Mark Mendelld5897672015-08-12 21:16:41 -04002340}
2341
2342void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002343 GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long= */ false);
Mark Mendelld5897672015-08-12 21:16:41 -04002344}
2345
2346void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002347 CreateLeadingZeroLocations(allocator_, invoke, /* is_long= */ true);
Mark Mendelld5897672015-08-12 21:16:41 -04002348}
2349
2350void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002351 GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long= */ true);
Mark Mendelld5897672015-08-12 21:16:41 -04002352}
2353
Vladimir Markoca6fff82017-10-03 14:49:14 +01002354static void CreateTrailingZeroLocations(ArenaAllocator* allocator, HInvoke* invoke, bool is_long) {
2355 LocationSummary* locations =
2356 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Mark Mendell2d554792015-09-15 21:45:18 -04002357 if (is_long) {
2358 locations->SetInAt(0, Location::RequiresRegister());
2359 } else {
2360 locations->SetInAt(0, Location::Any());
2361 }
2362 locations->SetOut(Location::RequiresRegister());
2363}
2364
Aart Bika19616e2016-02-01 18:57:58 -08002365static void GenTrailingZeros(X86Assembler* assembler,
2366 CodeGeneratorX86* codegen,
2367 HInvoke* invoke, bool is_long) {
Mark Mendell2d554792015-09-15 21:45:18 -04002368 LocationSummary* locations = invoke->GetLocations();
2369 Location src = locations->InAt(0);
2370 Register out = locations->Out().AsRegister<Register>();
2371
2372 if (invoke->InputAt(0)->IsConstant()) {
2373 // Evaluate this at compile time.
2374 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
2375 if (value == 0) {
2376 value = is_long ? 64 : 32;
2377 } else {
2378 value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
2379 }
Aart Bika19616e2016-02-01 18:57:58 -08002380 codegen->Load32BitValue(out, value);
Mark Mendell2d554792015-09-15 21:45:18 -04002381 return;
2382 }
2383
2384 // Handle the non-constant cases.
2385 if (!is_long) {
2386 if (src.IsRegister()) {
2387 __ bsfl(out, src.AsRegister<Register>());
2388 } else {
2389 DCHECK(src.IsStackSlot());
2390 __ bsfl(out, Address(ESP, src.GetStackIndex()));
2391 }
2392
2393 // BSF sets ZF if the input was zero, in which case the output is undefined.
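    // When the input is non-zero, BSF directly yields the number of trailing zeros (the index
    // of the lowest set bit), so no correction is needed.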
2394 NearLabel done;
2395 __ j(kNotEqual, &done);
2396
2397 // Fix the zero case with the expected result.
2398 __ movl(out, Immediate(32));
2399
2400 __ Bind(&done);
2401 return;
2402 }
2403
2404 // The 64-bit case needs to worry about both parts of the register pair.
2405 DCHECK(src.IsRegisterPair());
2406 Register src_lo = src.AsRegisterPairLow<Register>();
2407 Register src_hi = src.AsRegisterPairHigh<Register>();
2408 NearLabel done, all_zeroes;
2409
2410 // If the low word is zero, then ZF will be set. If not, we have the answer.
2411 __ bsfl(out, src_lo);
2412 __ j(kNotEqual, &done);
2413
2414 // Low word was zero. We have to compute the high word count and add 32.
2415 __ bsfl(out, src_hi);
2416 __ j(kEqual, &all_zeroes);
2417
2418 // We had a valid result. Add 32 to account for the low word being zero.
2419 __ addl(out, Immediate(32));
2420 __ jmp(&done);
2421
2422 // All zero case.
2423 __ Bind(&all_zeroes);
2424 __ movl(out, Immediate(64));
2425
2426 __ Bind(&done);
2427}
2428
2429void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002430 CreateTrailingZeroLocations(allocator_, invoke, /* is_long= */ false);
Mark Mendell2d554792015-09-15 21:45:18 -04002431}
2432
2433void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002434 GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long= */ false);
Mark Mendell2d554792015-09-15 21:45:18 -04002435}
2436
2437void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002438 CreateTrailingZeroLocations(allocator_, invoke, /* is_long= */ true);
Mark Mendell2d554792015-09-15 21:45:18 -04002439}
2440
2441void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08002442 GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long= */ true);
Mark Mendell2d554792015-09-15 21:45:18 -04002443}
2444
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002445static bool IsSameInput(HInstruction* instruction, size_t input0, size_t input1) {
2446 return instruction->InputAt(input0) == instruction->InputAt(input1);
2447}
2448
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002449// Compute base address for the System.arraycopy intrinsic in `base`.
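// That is, base = array + data_offset + pos * element_size, where data_offset is
// mirror::Array::DataOffset(element_size).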
2450static void GenSystemArrayCopyBaseAddress(X86Assembler* assembler,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002451 DataType::Type type,
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002452 const Register& array,
2453 const Location& pos,
2454 const Register& base) {
2455 // This routine is only used by the SystemArrayCopy intrinsic at the
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002456 // moment, so only DataType::Type::kReference is accepted as `type`. Allowing other
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002457 // element types here would also let it implement the SystemArrayCopyChar intrinsic.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002458 DCHECK_EQ(type, DataType::Type::kReference);
2459 const int32_t element_size = DataType::Size(type);
2460 const ScaleFactor scale_factor = static_cast<ScaleFactor>(DataType::SizeShift(type));
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002461 const uint32_t data_offset = mirror::Array::DataOffset(element_size).Uint32Value();
2462
2463 if (pos.IsConstant()) {
2464 int32_t constant = pos.GetConstant()->AsIntConstant()->GetValue();
2465 __ leal(base, Address(array, element_size * constant + data_offset));
2466 } else {
2467 __ leal(base, Address(array, pos.AsRegister<Register>(), scale_factor, data_offset));
2468 }
2469}
2470
2471// Compute end source address for the System.arraycopy intrinsic in `end`.
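// That is, end = base + copy_length * element_size.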
2472static void GenSystemArrayCopyEndAddress(X86Assembler* assembler,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002473 DataType::Type type,
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002474 const Location& copy_length,
2475 const Register& base,
2476 const Register& end) {
2477 // This routine is only used by the SystemArrayCopy intrinsic at the
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002478 // moment, so only DataType::Type::kReference is accepted as `type`. Allowing other
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002479 // element types here would also let it implement the SystemArrayCopyChar intrinsic.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002480 DCHECK_EQ(type, DataType::Type::kReference);
2481 const int32_t element_size = DataType::Size(type);
2482 const ScaleFactor scale_factor = static_cast<ScaleFactor>(DataType::SizeShift(type));
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002483
2484 if (copy_length.IsConstant()) {
2485 int32_t constant = copy_length.GetConstant()->AsIntConstant()->GetValue();
2486 __ leal(end, Address(base, element_size * constant));
2487 } else {
2488 __ leal(end, Address(base, copy_length.AsRegister<Register>(), scale_factor, 0));
2489 }
2490}
2491
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002492void IntrinsicLocationsBuilderX86::VisitSystemArrayCopy(HInvoke* invoke) {
Roland Levillain0b671c02016-08-19 12:02:34 +01002493 // The only read barrier implementation supporting the
2494 // SystemArrayCopy intrinsic is the Baker-style read barrier.
2495 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002496 return;
2497 }
2498
2499 CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
2500 if (invoke->GetLocations() != nullptr) {
2501 // Need a byte register for marking.
2502 invoke->GetLocations()->SetTempAt(1, Location::RegisterLocation(ECX));
2503
2504 static constexpr size_t kSrc = 0;
2505 static constexpr size_t kSrcPos = 1;
2506 static constexpr size_t kDest = 2;
2507 static constexpr size_t kDestPos = 3;
2508 static constexpr size_t kLength = 4;
2509
2510 if (!invoke->InputAt(kSrcPos)->IsIntConstant() &&
2511 !invoke->InputAt(kDestPos)->IsIntConstant() &&
2512 !invoke->InputAt(kLength)->IsIntConstant()) {
2513 if (!IsSameInput(invoke, kSrcPos, kDestPos) &&
2514 !IsSameInput(invoke, kSrcPos, kLength) &&
2515 !IsSameInput(invoke, kDestPos, kLength) &&
2516 !IsSameInput(invoke, kSrc, kDest)) {
2517 // Not enough registers, make the length also take a stack slot.
2518 invoke->GetLocations()->SetInAt(kLength, Location::Any());
2519 }
2520 }
2521 }
2522}
2523
2524void IntrinsicCodeGeneratorX86::VisitSystemArrayCopy(HInvoke* invoke) {
Roland Levillain0b671c02016-08-19 12:02:34 +01002525 // The only read barrier implementation supporting the
2526 // SystemArrayCopy intrinsic is the Baker-style read barrier.
2527 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002528
2529 X86Assembler* assembler = GetAssembler();
2530 LocationSummary* locations = invoke->GetLocations();
2531
2532 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2533 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2534 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2535 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Roland Levillain0b671c02016-08-19 12:02:34 +01002536 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002537
2538 Register src = locations->InAt(0).AsRegister<Register>();
2539 Location src_pos = locations->InAt(1);
2540 Register dest = locations->InAt(2).AsRegister<Register>();
2541 Location dest_pos = locations->InAt(3);
Roland Levillain0b671c02016-08-19 12:02:34 +01002542 Location length_arg = locations->InAt(4);
2543 Location length = length_arg;
2544 Location temp1_loc = locations->GetTemp(0);
2545 Register temp1 = temp1_loc.AsRegister<Register>();
2546 Location temp2_loc = locations->GetTemp(1);
2547 Register temp2 = temp2_loc.AsRegister<Register>();
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002548
Vladimir Marko174b2e22017-10-12 13:34:49 +01002549 SlowPathCode* intrinsic_slow_path =
2550 new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
Roland Levillain0b671c02016-08-19 12:02:34 +01002551 codegen_->AddSlowPath(intrinsic_slow_path);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002552
2553 NearLabel conditions_on_positions_validated;
2554 SystemArrayCopyOptimizations optimizations(invoke);
2555
2556 // If source and destination are the same, we go to the slow path if a forward (low-to-high)
2557 // copy would be incorrect, i.e. when dest_pos > src_pos and the regions may overlap.
2558 if (src_pos.IsConstant()) {
2559 int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
2560 if (dest_pos.IsConstant()) {
2561 int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
2562 if (optimizations.GetDestinationIsSource()) {
2563 // Checked when building locations.
2564 DCHECK_GE(src_pos_constant, dest_pos_constant);
2565 } else if (src_pos_constant < dest_pos_constant) {
2566 __ cmpl(src, dest);
Roland Levillain0b671c02016-08-19 12:02:34 +01002567 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002568 }
2569 } else {
2570 if (!optimizations.GetDestinationIsSource()) {
2571 __ cmpl(src, dest);
2572 __ j(kNotEqual, &conditions_on_positions_validated);
2573 }
2574 __ cmpl(dest_pos.AsRegister<Register>(), Immediate(src_pos_constant));
Roland Levillain0b671c02016-08-19 12:02:34 +01002575 __ j(kGreater, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002576 }
2577 } else {
2578 if (!optimizations.GetDestinationIsSource()) {
2579 __ cmpl(src, dest);
2580 __ j(kNotEqual, &conditions_on_positions_validated);
2581 }
2582 if (dest_pos.IsConstant()) {
2583 int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
2584 __ cmpl(src_pos.AsRegister<Register>(), Immediate(dest_pos_constant));
Roland Levillain0b671c02016-08-19 12:02:34 +01002585 __ j(kLess, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002586 } else {
2587 __ cmpl(src_pos.AsRegister<Register>(), dest_pos.AsRegister<Register>());
Roland Levillain0b671c02016-08-19 12:02:34 +01002588 __ j(kLess, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002589 }
2590 }
2591
2592 __ Bind(&conditions_on_positions_validated);
2593
2594 if (!optimizations.GetSourceIsNotNull()) {
2595 // Bail out if the source is null.
2596 __ testl(src, src);
Roland Levillain0b671c02016-08-19 12:02:34 +01002597 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002598 }
2599
2600 if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
2601 // Bail out if the destination is null.
2602 __ testl(dest, dest);
Roland Levillain0b671c02016-08-19 12:02:34 +01002603 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002604 }
2605
Roland Levillain0b671c02016-08-19 12:02:34 +01002606 Location temp3_loc = locations->GetTemp(2);
2607 Register temp3 = temp3_loc.AsRegister<Register>();
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002608 if (length.IsStackSlot()) {
2609 __ movl(temp3, Address(ESP, length.GetStackIndex()));
2610 length = Location::RegisterLocation(temp3);
2611 }
2612
2613 // If the length is negative, bail out.
2614 // We have already checked in the LocationsBuilder for the constant case.
2615 if (!length.IsConstant() &&
2616 !optimizations.GetCountIsSourceLength() &&
2617 !optimizations.GetCountIsDestinationLength()) {
2618 __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
Roland Levillain0b671c02016-08-19 12:02:34 +01002619 __ j(kLess, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002620 }
2621
2622 // Validity checks: source.
2623 CheckPosition(assembler,
2624 src_pos,
2625 src,
2626 length,
Roland Levillain0b671c02016-08-19 12:02:34 +01002627 intrinsic_slow_path,
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002628 temp1,
2629 optimizations.GetCountIsSourceLength());
2630
2631 // Validity checks: dest.
2632 CheckPosition(assembler,
2633 dest_pos,
2634 dest,
2635 length,
Roland Levillain0b671c02016-08-19 12:02:34 +01002636 intrinsic_slow_path,
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002637 temp1,
2638 optimizations.GetCountIsDestinationLength());
2639
2640 if (!optimizations.GetDoesNotNeedTypeCheck()) {
2641 // Check whether all elements of the source array are assignable to the component
2642 // type of the destination array. We do two checks: the classes are the same,
2643 // or the destination is Object[]. If none of these checks succeed, we go to the
2644 // slow path.
Roland Levillain0b671c02016-08-19 12:02:34 +01002645
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002646 if (!optimizations.GetSourceIsNonPrimitiveArray()) {
Roland Levillain0b671c02016-08-19 12:02:34 +01002647 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2648 // /* HeapReference<Class> */ temp1 = src->klass_
2649 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08002650 invoke, temp1_loc, src, class_offset, /* needs_null_check= */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002651 // Bail out if the source is not a non primitive array.
2652 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2653 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08002654 invoke, temp1_loc, temp1, component_offset, /* needs_null_check= */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002655 __ testl(temp1, temp1);
2656 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2657 // If heap poisoning is enabled, `temp1` has been unpoisoned
2658 // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
2659 } else {
2660 // /* HeapReference<Class> */ temp1 = src->klass_
2661 __ movl(temp1, Address(src, class_offset));
2662 __ MaybeUnpoisonHeapReference(temp1);
2663 // Bail out if the source is not a non primitive array.
2664 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2665 __ movl(temp1, Address(temp1, component_offset));
2666 __ testl(temp1, temp1);
2667 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2668 __ MaybeUnpoisonHeapReference(temp1);
2669 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002670 __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0b671c02016-08-19 12:02:34 +01002671 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002672 }
2673
Roland Levillain0b671c02016-08-19 12:02:34 +01002674 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2675 if (length.Equals(Location::RegisterLocation(temp3))) {
2676 // When Baker read barriers are enabled, register `temp3`,
2677 // which in the present case contains the `length` parameter,
2678 // will be overwritten below. Make the `length` location
2679 // reference the original stack location; it will be moved
2680 // back to `temp3` later if necessary.
2681 DCHECK(length_arg.IsStackSlot());
2682 length = length_arg;
2683 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002684
Roland Levillain0b671c02016-08-19 12:02:34 +01002685 // /* HeapReference<Class> */ temp1 = dest->klass_
2686 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08002687 invoke, temp1_loc, dest, class_offset, /* needs_null_check= */ false);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002688
Roland Levillain0b671c02016-08-19 12:02:34 +01002689 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
2690 // Bail out if the destination is not a non primitive array.
2691 //
2692 // Register `temp1` is not trashed by the read barrier emitted
2693 // by GenerateFieldLoadWithBakerReadBarrier below, as that
2694 // method produces a call to a ReadBarrierMarkRegX entry point,
2695 // which saves all potentially live registers, including
2696 // temporaries such as `temp1`.
2697 // /* HeapReference<Class> */ temp2 = temp1->component_type_
2698 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08002699 invoke, temp2_loc, temp1, component_offset, /* needs_null_check= */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002700 __ testl(temp2, temp2);
2701 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2702 // If heap poisoning is enabled, `temp2` has been unpoisoned
2703 // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
2704 __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
2705 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
2706 }
2707
2708 // For the same reason given earlier, `temp1` is not trashed by the
2709 // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below.
2710 // /* HeapReference<Class> */ temp2 = src->klass_
2711 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08002712 invoke, temp2_loc, src, class_offset, /* needs_null_check= */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002713 // Note: if heap poisoning is on, we are comparing two unpoisoned references here.
2714 __ cmpl(temp1, temp2);
2715
2716 if (optimizations.GetDestinationIsTypedObjectArray()) {
2717 NearLabel do_copy;
2718 __ j(kEqual, &do_copy);
2719 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2720 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08002721 invoke, temp1_loc, temp1, component_offset, /* needs_null_check= */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002722 // We do not need to emit a read barrier for the following
2723 // heap reference load, as `temp1` is only used in a
2724 // comparison with null below, and this reference is not
2725 // kept afterwards.
2726 __ cmpl(Address(temp1, super_offset), Immediate(0));
2727 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
2728 __ Bind(&do_copy);
2729 } else {
2730 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
2731 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002732 } else {
Roland Levillain0b671c02016-08-19 12:02:34 +01002733 // Non read barrier code.
2734
2735 // /* HeapReference<Class> */ temp1 = dest->klass_
2736 __ movl(temp1, Address(dest, class_offset));
2737 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
2738 __ MaybeUnpoisonHeapReference(temp1);
2739 // Bail out if the destination is not a non primitive array.
2740 // /* HeapReference<Class> */ temp2 = temp1->component_type_
2741 __ movl(temp2, Address(temp1, component_offset));
2742 __ testl(temp2, temp2);
2743 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2744 __ MaybeUnpoisonHeapReference(temp2);
2745 __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
2746 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
2747 // Re-poison the heap reference to make the compare instruction below
2748 // compare two poisoned references.
2749 __ PoisonHeapReference(temp1);
2750 }
2751
2752 // Note: if heap poisoning is on, we are comparing two poisoned references here.
2753 __ cmpl(temp1, Address(src, class_offset));
2754
2755 if (optimizations.GetDestinationIsTypedObjectArray()) {
2756 NearLabel do_copy;
2757 __ j(kEqual, &do_copy);
2758 __ MaybeUnpoisonHeapReference(temp1);
2759 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2760 __ movl(temp1, Address(temp1, component_offset));
2761 __ MaybeUnpoisonHeapReference(temp1);
2762 __ cmpl(Address(temp1, super_offset), Immediate(0));
2763 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
2764 __ Bind(&do_copy);
2765 } else {
2766 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
2767 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002768 }
2769 } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
2770 DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
2771 // Bail out if the source is not a non primitive array.
Roland Levillain0b671c02016-08-19 12:02:34 +01002772 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2773 // /* HeapReference<Class> */ temp1 = src->klass_
2774 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08002775 invoke, temp1_loc, src, class_offset, /* needs_null_check= */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002776 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2777 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08002778 invoke, temp1_loc, temp1, component_offset, /* needs_null_check= */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002779 __ testl(temp1, temp1);
2780 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2781 // If heap poisoning is enabled, `temp1` has been unpoisoned
2782 // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
2783 } else {
2784 // /* HeapReference<Class> */ temp1 = src->klass_
2785 __ movl(temp1, Address(src, class_offset));
2786 __ MaybeUnpoisonHeapReference(temp1);
2787 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2788 __ movl(temp1, Address(temp1, component_offset));
2789 __ testl(temp1, temp1);
2790 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2791 __ MaybeUnpoisonHeapReference(temp1);
2792 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002793 __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0b671c02016-08-19 12:02:34 +01002794 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002795 }
2796
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002797 const DataType::Type type = DataType::Type::kReference;
2798 const int32_t element_size = DataType::Size(type);
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002799
Roland Levillain0b671c02016-08-19 12:02:34 +01002800 // Compute the base source address in `temp1`.
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002801 GenSystemArrayCopyBaseAddress(GetAssembler(), type, src, src_pos, temp1);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002802
Roland Levillain0b671c02016-08-19 12:02:34 +01002803 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2804 // If it is needed (in the case of the fast-path loop), the base
2805 // destination address is computed later, as `temp2` is used for
2806 // intermediate computations.
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002807
Roland Levillain0b671c02016-08-19 12:02:34 +01002808 // Compute the end source address in `temp3`.
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002809 if (length.IsStackSlot()) {
2810 // Location `length` is again pointing at a stack slot, as
2811 // register `temp3` (which was containing the length parameter
2812 // register `temp3` (which held the length parameter
2813 // earlier) has been overwritten; restore it now.
2814 __ movl(temp3, Address(ESP, length.GetStackIndex()));
2815 length = Location::RegisterLocation(temp3);
Roland Levillain0b671c02016-08-19 12:02:34 +01002816 }
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002817 GenSystemArrayCopyEndAddress(GetAssembler(), type, length, temp1, temp3);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002818
Roland Levillain0b671c02016-08-19 12:02:34 +01002819 // SystemArrayCopy implementation for Baker read barriers (see
2820 // also CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier):
2821 //
2822 // if (src_ptr != end_ptr) {
2823 // uint32_t rb_state = Lockword(src->monitor_).ReadBarrierState();
2824 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07002825 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain0b671c02016-08-19 12:02:34 +01002826 // if (is_gray) {
2827 // // Slow-path copy.
2828 // for (size_t i = 0; i != length; ++i) {
2829 // dest_array[dest_pos + i] =
2830 // MaybePoison(ReadBarrier::Mark(MaybeUnpoison(src_array[src_pos + i])));
2831 // }
2832 // } else {
2833 // // Fast-path copy.
2834 // do {
2835 // *dest_ptr++ = *src_ptr++;
2836 // } while (src_ptr != end_ptr)
2837 // }
2838 // }
2839
2840 NearLabel loop, done;
2841
2842 // Don't enter the copy loop if `length == 0`.
2843 __ cmpl(temp1, temp3);
2844 __ j(kEqual, &done);
2845
Vladimir Marko953437b2016-08-24 08:30:46 +00002846 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01002847 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07002848 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00002849 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
2850 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
2851 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
2852
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07002853 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00002854 // goto slow_path;
2855 // At this point, just do the "if" and make sure that flags are preserved until the branch.
2856 __ testb(Address(src, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain0b671c02016-08-19 12:02:34 +01002857
2858 // Load fence to prevent load-load reordering.
2859 // Note that this is a no-op, thanks to the x86 memory model.
2860 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
2861
2862 // Slow path used to copy array when `src` is gray.
2863 SlowPathCode* read_barrier_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01002864 new (codegen_->GetScopedAllocator()) ReadBarrierSystemArrayCopySlowPathX86(invoke);
Roland Levillain0b671c02016-08-19 12:02:34 +01002865 codegen_->AddSlowPath(read_barrier_slow_path);
2866
Vladimir Marko953437b2016-08-24 08:30:46 +00002867 // We have done the "if" of the gray bit check above, now branch based on the flags.
2868 __ j(kNotZero, read_barrier_slow_path->GetEntryLabel());
Roland Levillain0b671c02016-08-19 12:02:34 +01002869
2870 // Fast-path copy.
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002871 // Compute the base destination address in `temp2`.
2872 GenSystemArrayCopyBaseAddress(GetAssembler(), type, dest, dest_pos, temp2);
Roland Levillain0b671c02016-08-19 12:02:34 +01002873 // Iterate over the arrays and do a raw copy of the objects. We don't need to
2874 // poison/unpoison.
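    // Each iteration moves one 4-byte heap reference through the stack (push from the source,
    // pop to the destination), which avoids the need for yet another free register.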
2875 __ Bind(&loop);
2876 __ pushl(Address(temp1, 0));
2877 __ cfi().AdjustCFAOffset(4);
2878 __ popl(Address(temp2, 0));
2879 __ cfi().AdjustCFAOffset(-4);
2880 __ addl(temp1, Immediate(element_size));
2881 __ addl(temp2, Immediate(element_size));
2882 __ cmpl(temp1, temp3);
2883 __ j(kNotEqual, &loop);
2884
2885 __ Bind(read_barrier_slow_path->GetExitLabel());
2886 __ Bind(&done);
2887 } else {
2888 // Non read barrier code.
Roland Levillain0b671c02016-08-19 12:02:34 +01002889 // Compute the base destination address in `temp2`.
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002890 GenSystemArrayCopyBaseAddress(GetAssembler(), type, dest, dest_pos, temp2);
Roland Levillain0b671c02016-08-19 12:02:34 +01002891 // Compute the end source address in `temp3`.
Roland Levillain9cc0ea82017-03-16 11:25:59 +00002892 GenSystemArrayCopyEndAddress(GetAssembler(), type, length, temp1, temp3);
Roland Levillain0b671c02016-08-19 12:02:34 +01002893 // Iterate over the arrays and do a raw copy of the objects. We don't need to
2894 // poison/unpoison.
2895 NearLabel loop, done;
2896 __ cmpl(temp1, temp3);
2897 __ j(kEqual, &done);
2898 __ Bind(&loop);
2899 __ pushl(Address(temp1, 0));
2900 __ cfi().AdjustCFAOffset(4);
2901 __ popl(Address(temp2, 0));
2902 __ cfi().AdjustCFAOffset(-4);
2903 __ addl(temp1, Immediate(element_size));
2904 __ addl(temp2, Immediate(element_size));
2905 __ cmpl(temp1, temp3);
2906 __ j(kNotEqual, &loop);
2907 __ Bind(&done);
2908 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002909
2910 // We only need one card marking on the destination array.
Andreas Gampe3db70682018-12-26 15:12:03 -08002911 codegen_->MarkGCCard(temp1, temp2, dest, Register(kNoRegister), /* value_can_be_null= */ false);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002912
Roland Levillain0b671c02016-08-19 12:02:34 +01002913 __ Bind(intrinsic_slow_path->GetExitLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002914}
2915
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002916void IntrinsicLocationsBuilderX86::VisitIntegerValueOf(HInvoke* invoke) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01002917 DCHECK(invoke->IsInvokeStaticOrDirect());
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002918 InvokeRuntimeCallingConvention calling_convention;
2919 IntrinsicVisitor::ComputeIntegerValueOfLocations(
2920 invoke,
2921 codegen_,
2922 Location::RegisterLocation(EAX),
2923 Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Markoeebb8212018-06-05 14:57:24 +01002924
2925 LocationSummary* locations = invoke->GetLocations();
2926 if (locations != nullptr) {
2927 HInvokeStaticOrDirect* invoke_static_or_direct = invoke->AsInvokeStaticOrDirect();
2928 if (invoke_static_or_direct->HasSpecialInput() &&
2929 invoke->InputAt(invoke_static_or_direct->GetSpecialInputIndex())
2930 ->IsX86ComputeBaseMethodAddress()) {
2931 locations->SetInAt(invoke_static_or_direct->GetSpecialInputIndex(),
2932 Location::RequiresRegister());
2933 }
2934 }
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002935}
2936
2937void IntrinsicCodeGeneratorX86::VisitIntegerValueOf(HInvoke* invoke) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01002938 DCHECK(invoke->IsInvokeStaticOrDirect());
Vladimir Marko6fd16062018-06-26 11:02:04 +01002939 IntrinsicVisitor::IntegerValueOfInfo info =
2940 IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions());
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002941 LocationSummary* locations = invoke->GetLocations();
2942 X86Assembler* assembler = GetAssembler();
2943
2944 Register out = locations->Out().AsRegister<Register>();
2945 InvokeRuntimeCallingConvention calling_convention;
2946 if (invoke->InputAt(0)->IsConstant()) {
2947 int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
Vladimir Marko6fd16062018-06-26 11:02:04 +01002948 if (static_cast<uint32_t>(value - info.low) < info.length) {
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002949 // Just embed the j.l.Integer in the code.
Vladimir Marko6fd16062018-06-26 11:02:04 +01002950 DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01002951 codegen_->LoadBootImageAddress(
Vladimir Marko6fd16062018-06-26 11:02:04 +01002952 out, info.value_boot_image_reference, invoke->AsInvokeStaticOrDirect());
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002953 } else {
Vladimir Markoeebb8212018-06-05 14:57:24 +01002954 DCHECK(locations->CanCall());
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002955 // Allocate and initialize a new j.l.Integer.
2956 // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
2957 // JIT object table.
Vladimir Marko6fd16062018-06-26 11:02:04 +01002958 codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
2959 info.integer_boot_image_offset);
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002960 __ movl(Address(out, info.value_offset), Immediate(value));
2961 }
2962 } else {
Vladimir Markoeebb8212018-06-05 14:57:24 +01002963 DCHECK(locations->CanCall());
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002964 Register in = locations->InAt(0).AsRegister<Register>();
2965 // Check bounds of our cache.
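    // A single unsigned comparison of (value - info.low) against info.length covers the range
    // check info.low <= value < info.low + info.length.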
2966 __ leal(out, Address(in, -info.low));
Vladimir Markoeebb8212018-06-05 14:57:24 +01002967 __ cmpl(out, Immediate(info.length));
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002968 NearLabel allocate, done;
2969 __ j(kAboveEqual, &allocate);
2970 // If the value is within the bounds, load the j.l.Integer directly from the array.
Vladimir Markoeebb8212018-06-05 14:57:24 +01002971 constexpr size_t kElementSize = sizeof(mirror::HeapReference<mirror::Object>);
Vladimir Markoeebb8212018-06-05 14:57:24 +01002972 static_assert((1u << TIMES_4) == sizeof(mirror::HeapReference<mirror::Object>),
2973 "Check heap reference size.");
Vladimir Marko6fd16062018-06-26 11:02:04 +01002974 if (codegen_->GetCompilerOptions().IsBootImage()) {
2975 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
2976 size_t method_address_index = invoke->AsInvokeStaticOrDirect()->GetSpecialInputIndex();
2977 HX86ComputeBaseMethodAddress* method_address =
2978 invoke->InputAt(method_address_index)->AsX86ComputeBaseMethodAddress();
2979 DCHECK(method_address != nullptr);
2980 Register method_address_reg =
2981 invoke->GetLocations()->InAt(method_address_index).AsRegister<Register>();
Vladimir Marko4ef451a2020-07-23 09:54:27 +00002982 __ movl(out,
2983 Address(method_address_reg, out, TIMES_4, CodeGeneratorX86::kPlaceholder32BitOffset));
Vladimir Marko6fd16062018-06-26 11:02:04 +01002984 codegen_->RecordBootImageIntrinsicPatch(method_address, info.array_data_boot_image_reference);
2985 } else {
2986 // Note: We're about to clobber the index in `out`, so we need to use `in` and
2987 // adjust the offset accordingly.
2988 uint32_t mid_array_boot_image_offset =
2989 info.array_data_boot_image_reference - info.low * kElementSize;
2990 codegen_->LoadBootImageAddress(
2991 out, mid_array_boot_image_offset, invoke->AsInvokeStaticOrDirect());
2992 DCHECK_NE(out, in);
2993 __ movl(out, Address(out, in, TIMES_4, 0));
2994 }
Nicolas Geoffray331605a2017-03-01 11:01:41 +00002995 __ MaybeUnpoisonHeapReference(out);
2996 __ jmp(&done);
2997 __ Bind(&allocate);
2998 // Otherwise allocate and initialize a new j.l.Integer.
Vladimir Marko6fd16062018-06-26 11:02:04 +01002999 codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
3000 info.integer_boot_image_offset);
Nicolas Geoffray331605a2017-03-01 11:01:41 +00003001 __ movl(Address(out, info.value_offset), in);
3002 __ Bind(&done);
3003 }
3004}
3005
Nicolas Geoffray365719c2017-03-08 13:11:50 +00003006void IntrinsicLocationsBuilderX86::VisitThreadInterrupted(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003007 LocationSummary* locations =
3008 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Nicolas Geoffray365719c2017-03-08 13:11:50 +00003009 locations->SetOut(Location::RequiresRegister());
3010}
3011
3012void IntrinsicCodeGeneratorX86::VisitThreadInterrupted(HInvoke* invoke) {
3013 X86Assembler* assembler = GetAssembler();
3014 Register out = invoke->GetLocations()->Out().AsRegister<Register>();
3015 Address address = Address::Absolute(Thread::InterruptedOffset<kX86PointerSize>().Int32Value());
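  // Load the thread-local interrupted flag through the FS segment; if it is set, clear it and
  // emit a fence so the clear is ordered before later accesses, giving Thread.interrupted()'s
  // test-and-clear semantics.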
3016 NearLabel done;
3017 __ fs()->movl(out, address);
3018 __ testl(out, out);
3019 __ j(kEqual, &done);
3020 __ fs()->movl(address, Immediate(0));
3021 codegen_->MemoryFence();
3022 __ Bind(&done);
3023}
3024
Hans Boehmc7b28de2018-03-09 17:05:28 -08003025void IntrinsicLocationsBuilderX86::VisitReachabilityFence(HInvoke* invoke) {
3026 LocationSummary* locations =
3027 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
3028 locations->SetInAt(0, Location::Any());
3029}
3030
3031void IntrinsicCodeGeneratorX86::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
Nicolas Geoffray365719c2017-03-08 13:11:50 +00003032
Andra Danciudc787f42020-07-07 14:28:56 +00003033void IntrinsicLocationsBuilderX86::VisitIntegerDivideUnsigned(HInvoke* invoke) {
3034 LocationSummary* locations = new (allocator_) LocationSummary(invoke,
3035 LocationSummary::kCallOnSlowPath,
3036 kIntrinsified);
3037 locations->SetInAt(0, Location::RegisterLocation(EAX));
3038 locations->SetInAt(1, Location::RequiresRegister());
3039 locations->SetOut(Location::SameAsFirstInput());
3040 // The x86 DIV instruction uses edx:eax as the dividend.
3041 locations->AddTemp(Location::RegisterLocation(EDX));
3042}
3043
3044void IntrinsicCodeGeneratorX86::VisitIntegerDivideUnsigned(HInvoke* invoke) {
3045 X86Assembler* assembler = GetAssembler();
3046 LocationSummary* locations = invoke->GetLocations();
3047 Location out = locations->Out();
3048 Location first = locations->InAt(0);
3049 Location second = locations->InAt(1);
3050 Register edx = locations->GetTemp(0).AsRegister<Register>();
3051 Register second_reg = second.AsRegister<Register>();
3052
3053 DCHECK_EQ(EAX, first.AsRegister<Register>());
3054 DCHECK_EQ(EAX, out.AsRegister<Register>());
3055 DCHECK_EQ(EDX, edx);
3056
3057 // Check if the divisor is zero; if so, bail out to the managed implementation to handle it.
3058 __ testl(second_reg, second_reg);
3059 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
3060 codegen_->AddSlowPath(slow_path);
3061 __ j(kEqual, slow_path->GetEntryLabel());
3062
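  // DIV divides the 64-bit value edx:eax by the operand; zeroing edx makes this an unsigned
  // 32-bit division, leaving the quotient in eax (the output) and the remainder in edx.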
3063 __ xorl(edx, edx);
3064 __ divl(second_reg);
3065
3066 __ Bind(slow_path->GetExitLabel());
3067}
3068
Andra Danciu1ca6f322020-08-12 08:58:07 +00003069void IntrinsicLocationsBuilderX86::VisitVarHandleGet(HInvoke* invoke) {
3070 // The only read barrier implementation supporting the
3071 // VarHandleGet intrinsic is the Baker-style read barrier.
3072 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
3073 return;
Andra Danciue3e187f2020-07-30 12:19:31 +00003074 }
3075
Andra Danciu1ca6f322020-08-12 08:58:07 +00003076 DataType::Type type = invoke->GetType();
3077
3078 if (type == DataType::Type::kVoid) {
3079 // The return type of a get should not be void; do not intrinsify in that case.
3080 return;
3081 }
3082
Andra Danciu1ca6f322020-08-12 08:58:07 +00003083 if (invoke->GetNumberOfArguments() == 1u) {
3084 // Static field get
3085 ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
3086 LocationSummary* locations = new (allocator) LocationSummary(
3087 invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
3088 locations->SetInAt(0, Location::RequiresRegister());
3089 locations->AddTemp(Location::RequiresRegister());
3090
3091 switch (DataType::Kind(type)) {
3092 case DataType::Type::kInt64:
3093 locations->AddTemp(Location::RequiresRegister());
3094 FALLTHROUGH_INTENDED;
3095 case DataType::Type::kInt32:
3096 locations->SetOut(Location::RequiresRegister());
3097 break;
Andra Danciuaa358832020-08-25 15:09:43 +00003098 case DataType::Type::kReference:
3099 // The second input is not an instruction argument. It is the callsite return type
3100 // used to check compatibility with the VarHandle's varType.
3101 locations->SetInAt(1, Location::RequiresRegister());
3102 locations->SetOut(Location::RequiresRegister());
3103 break;
Andra Danciu1ca6f322020-08-12 08:58:07 +00003104 default:
3105 DCHECK(DataType::IsFloatingPointType(type));
3106 locations->AddTemp(Location::RequiresRegister());
3107 locations->SetOut(Location::RequiresFpuRegister());
3108 }
3109 }
3110
3111 // TODO: support instance fields, arrays, etc.
Andra Danciue3e187f2020-07-30 12:19:31 +00003112}
3113
Andra Danciu1ca6f322020-08-12 08:58:07 +00003114void IntrinsicCodeGeneratorX86::VisitVarHandleGet(HInvoke* invoke) {
3115 // The only read barrier implementation supporting the
3116 // VarHandleGet intrinsic is the Baker-style read barrier.
3117 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
Andra Danciue74df4c2020-08-10 09:35:51 +00003118
Andra Danciu1ca6f322020-08-12 08:58:07 +00003119 X86Assembler* assembler = codegen_->GetAssembler();
3120 LocationSummary* locations = invoke->GetLocations();
3121 Register varhandle_object = locations->InAt(0).AsRegister<Register>();
Andra Danciue3e187f2020-07-30 12:19:31 +00003122 const uint32_t access_modes_bitmask_offset =
3123 mirror::VarHandle::AccessModesBitMaskOffset().Uint32Value();
3124 mirror::VarHandle::AccessMode access_mode =
3125 mirror::VarHandle::GetAccessModeByIntrinsic(invoke->GetIntrinsic());
3126 const uint32_t access_mode_bit = 1u << static_cast<uint32_t>(access_mode);
Andra Danciu1ca6f322020-08-12 08:58:07 +00003127 const uint32_t var_type_offset = mirror::VarHandle::VarTypeOffset().Uint32Value();
3128 const uint32_t coordtype0_offset = mirror::VarHandle::CoordinateType0Offset().Uint32Value();
Andra Danciuaa358832020-08-25 15:09:43 +00003129 const uint32_t super_class_offset = mirror::Class::SuperClassOffset().Uint32Value();
Andra Danciu1ca6f322020-08-12 08:58:07 +00003130 DataType::Type type = invoke->GetType();
Andra Danciu1ca6f322020-08-12 08:58:07 +00003131 DCHECK_NE(type, DataType::Type::kVoid);
Andra Danciu1ca6f322020-08-12 08:58:07 +00003132 Register temp = locations->GetTemp(0).AsRegister<Register>();
Andra Danciuaa358832020-08-25 15:09:43 +00003133 InstructionCodeGeneratorX86* instr_codegen =
3134 down_cast<InstructionCodeGeneratorX86*>(codegen_->GetInstructionVisitor());
Andra Danciue3e187f2020-07-30 12:19:31 +00003135
3136 // If the access mode is not supported, bail to runtime implementation to handle
3137 __ testl(Address(varhandle_object, access_modes_bitmask_offset), Immediate(access_mode_bit));
Andra Danciu1ca6f322020-08-12 08:58:07 +00003138 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
3139 codegen_->AddSlowPath(slow_path);
Andra Danciue3e187f2020-07-30 12:19:31 +00003140 __ j(kZero, slow_path->GetEntryLabel());
3141
Andra Danciu1ca6f322020-08-12 08:58:07 +00003142 // Check that the varhandle references a static field by checking that coordinateType0 == null.
3143 // Do not emit read barrier (or unpoison the reference) for comparing to null.
3144 __ cmpl(Address(varhandle_object, coordtype0_offset), Immediate(0));
3145 __ j(kNotEqual, slow_path->GetEntryLabel());
Andra Danciuaa358832020-08-25 15:09:43 +00003146 // For primitive types, we do not need a read barrier when loading the varType reference, as it
 3147 // is only used to load a constant field through that reference. For reference types, we
 3148 // deliberately avoid the read barrier, letting the slow path handle the false negatives.
3149 __ movl(temp, Address(varhandle_object, var_type_offset));
3150 __ MaybeUnpoisonHeapReference(temp);
3151 // Check the varType against the type we're trying to retrieve.
3152 if (type == DataType::Type::kReference) {
3153 // For reference types, check whether varType is exactly the callsite return type; if not,
3154 // walk varType's superclass chain to see whether the return type is a superclass.
3155 Register callsite_ret_type = locations->InAt(1).AsRegister<Register>();
3156 NearLabel check_ret_type_compatibility, ret_type_matched;
3157
3158 __ Bind(&check_ret_type_compatibility);
3159 __ cmpl(temp, callsite_ret_type);
3160 __ j(kEqual, &ret_type_matched);
3161 // Load the super class.
3162 __ movl(temp, Address(temp, super_class_offset));
3163 __ MaybeUnpoisonHeapReference(temp);
3164 // If the super class is null, we reached the root of the hierarchy. The types are not
3165 // compatible.
3166 __ cmpl(temp, Immediate(0));
3167 __ j(kEqual, slow_path->GetEntryLabel());
3168 __ jmp(&check_ret_type_compatibility);
3169 __ Bind(&ret_type_matched);
3170 } else {
3171 // For primitive types, check the varType.primitiveType field.
3172 uint32_t primitive_type = static_cast<uint32_t>(DataTypeToPrimitive(type));
3173 const uint32_t primitive_type_offset = mirror::Class::PrimitiveTypeOffset().Uint32Value();
3174
3175 __ cmpw(Address(temp, primitive_type_offset), Immediate(primitive_type));
3176 __ j(kNotEqual, slow_path->GetEntryLabel());
3177 }
Andra Danciu1ca6f322020-08-12 08:58:07 +00003178
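  // At this point the VarHandle is known to support the requested access mode, to reference a
  // static field, and to have a varType compatible with the call site, so the field can be read
  // inline.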
3179 Location out = locations->Out();
3180 // Use 'out' as a temporary register if it's a core register
3181 Register offset =
3182 out.IsRegister() ? out.AsRegister<Register>() : locations->GetTemp(1).AsRegister<Register>();
3183 const uint32_t artfield_offset = mirror::FieldVarHandle::ArtFieldOffset().Uint32Value();
3184 const uint32_t offset_offset = ArtField::OffsetOffset().Uint32Value();
3185 const uint32_t declaring_class_offset = ArtField::DeclaringClassOffset().Uint32Value();
3186
 3187  // Load the ArtField, its offset, and its declaring class.
3188 __ movl(temp, Address(varhandle_object, artfield_offset));
3189 __ movl(offset, Address(temp, offset_offset));
Andra Danciu1ca6f322020-08-12 08:58:07 +00003190 instr_codegen->GenerateGcRootFieldLoad(invoke,
3191 Location::RegisterLocation(temp),
3192 Address(temp, declaring_class_offset),
3193 /* fixup_label= */ nullptr,
3194 kCompilerReadBarrierOption);
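  // A static field lives inside its declaring class object, so 'temp' (now holding the declaring
  // class) plus 'offset' addresses the field's storage.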
3195
3196 // Load the value from the field
3197 CodeGeneratorX86* codegen_x86 = down_cast<CodeGeneratorX86*>(codegen_);
Andra Danciuaa358832020-08-25 15:09:43 +00003198 if (type == DataType::Type::kReference) {
3199 if (kCompilerReadBarrierOption == kWithReadBarrier) {
3200 codegen_x86->GenerateReferenceLoadWithBakerReadBarrier(invoke,
3201 out,
3202 temp,
3203 Address(temp, offset, TIMES_1, 0),
3204 /* needs_null_check= */ false);
3205 } else {
3206 __ movl(out.AsRegister<Register>(), Address(temp, offset, TIMES_1, 0));
3207 __ MaybeUnpoisonHeapReference(out.AsRegister<Register>());
3208 }
3209 } else {
3210 codegen_x86->MoveFromMemory(type, out, temp, offset);
3211 }
Andra Danciue3e187f2020-07-30 12:19:31 +00003212
3213 __ Bind(slow_path->GetExitLabel());
3214}
3215
Andra Danciue3e187f2020-07-30 12:19:31 +00003216
Aart Bik2f9fcc92016-03-01 15:16:54 -08003217UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
Vladimir Marko4ee8e292017-06-02 15:39:30 +00003218UNIMPLEMENTED_INTRINSIC(X86, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08003219UNIMPLEMENTED_INTRINSIC(X86, FloatIsInfinite)
3220UNIMPLEMENTED_INTRINSIC(X86, DoubleIsInfinite)
3221UNIMPLEMENTED_INTRINSIC(X86, IntegerHighestOneBit)
3222UNIMPLEMENTED_INTRINSIC(X86, LongHighestOneBit)
xueliang.zhongcb58b072017-10-13 12:06:56 +01003223UNIMPLEMENTED_INTRINSIC(X86, CRC32Update)
Evgeny Astigeevich15c5b972018-11-20 13:41:40 +00003224UNIMPLEMENTED_INTRINSIC(X86, CRC32UpdateBytes)
Evgeny Astigeevich776a7c22018-12-17 11:40:34 +00003225UNIMPLEMENTED_INTRINSIC(X86, CRC32UpdateByteBuffer)
xueliang.zhong9ce340f2019-01-22 17:46:09 +00003226UNIMPLEMENTED_INTRINSIC(X86, FP16ToFloat)
Vladimir Marko7f958e32019-10-24 09:03:58 +00003227UNIMPLEMENTED_INTRINSIC(X86, FP16ToHalf)
Usama Arifb9f02c22019-10-25 17:37:33 +01003228UNIMPLEMENTED_INTRINSIC(X86, FP16Floor)
Usama Arif665aac42019-10-29 11:13:18 +00003229UNIMPLEMENTED_INTRINSIC(X86, FP16Ceil)
Usama Arif681692b2019-10-30 16:23:26 +00003230UNIMPLEMENTED_INTRINSIC(X86, FP16Rint)
Usama Arif457e9fa2019-11-11 15:29:59 +00003231UNIMPLEMENTED_INTRINSIC(X86, FP16Greater)
3232UNIMPLEMENTED_INTRINSIC(X86, FP16GreaterEquals)
3233UNIMPLEMENTED_INTRINSIC(X86, FP16Less)
3234UNIMPLEMENTED_INTRINSIC(X86, FP16LessEquals)
Mark Mendell09ed1a32015-03-25 08:30:06 -04003235
Aart Bikff7d89c2016-11-07 08:49:28 -08003236UNIMPLEMENTED_INTRINSIC(X86, StringStringIndexOf)
 3237UNIMPLEMENTED_INTRINSIC(X86, StringStringIndexOfAfter)
Aart Bik71bf7b42016-11-16 10:17:46 -08003238UNIMPLEMENTED_INTRINSIC(X86, StringBufferAppend)
 3239UNIMPLEMENTED_INTRINSIC(X86, StringBufferLength)
 3240UNIMPLEMENTED_INTRINSIC(X86, StringBufferToString)
Vladimir Markod4561172017-10-30 17:48:25 +00003241UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendObject)
 3242UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendString)
 3243UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendCharSequence)
 3244UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendCharArray)
 3245UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendBoolean)
 3246UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendChar)
 3247UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendInt)
 3248UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendLong)
 3249UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendFloat)
 3250UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendDouble)
Aart Bik71bf7b42016-11-16 10:17:46 -08003251UNIMPLEMENTED_INTRINSIC(X86, StringBuilderLength)
 3252UNIMPLEMENTED_INTRINSIC(X86, StringBuilderToString)
Aart Bikff7d89c2016-11-07 08:49:28 -08003253
Aart Bik0e54c012016-03-04 12:08:31 -08003254// 1.8.
3255UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddInt)
3256UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddLong)
3257UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetInt)
3258UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetLong)
3259UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08003260
Andra Danciua0130e82020-07-23 12:34:56 +00003261UNIMPLEMENTED_INTRINSIC(X86, VarHandleFullFence)
3262UNIMPLEMENTED_INTRINSIC(X86, VarHandleAcquireFence)
3263UNIMPLEMENTED_INTRINSIC(X86, VarHandleReleaseFence)
3264UNIMPLEMENTED_INTRINSIC(X86, VarHandleLoadLoadFence)
3265UNIMPLEMENTED_INTRINSIC(X86, VarHandleStoreStoreFence)
3266UNIMPLEMENTED_INTRINSIC(X86, MethodHandleInvokeExact)
3267UNIMPLEMENTED_INTRINSIC(X86, MethodHandleInvoke)
3268UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndExchange)
3269UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndExchangeAcquire)
3270UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndExchangeRelease)
3271UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndSet)
Andra Danciua0130e82020-07-23 12:34:56 +00003272UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAcquire)
3273UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndAdd)
3274UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndAddAcquire)
3275UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndAddRelease)
3276UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseAnd)
3277UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseAndAcquire)
3278UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseAndRelease)
3279UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseOr)
3280UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseOrAcquire)
3281UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseOrRelease)
3282UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseXor)
3283UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseXorAcquire)
3284UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseXorRelease)
3285UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndSet)
3286UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndSetAcquire)
3287UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndSetRelease)
3288UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetOpaque)
3289UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetVolatile)
3290UNIMPLEMENTED_INTRINSIC(X86, VarHandleSet)
3291UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetOpaque)
3292UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetRelease)
3293UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetVolatile)
3294UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSet)
3295UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetAcquire)
3296UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetPlain)
3297UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetRelease)
3298
Aart Bik2f9fcc92016-03-01 15:16:54 -08003299UNREACHABLE_INTRINSICS(X86)
Roland Levillain4d027112015-07-01 15:41:14 +01003300
3301#undef __
3302
Mark Mendell09ed1a32015-03-25 08:30:06 -04003303} // namespace x86
3304} // namespace art