/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_x86.h"

#include "base/casts.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"
#include "utils/assembler.h"

namespace art {
namespace x86 {

static Register GetScratchRegister() {
  // ECX is an argument register on entry and gets spilled in BuildFrame().
  // After that, we can use it as a scratch register.
  return ECX;
}

// Slow path entered when Thread::Current()->_exception is non-null.
class X86ExceptionSlowPath final : public SlowPath {
 public:
  explicit X86ExceptionSlowPath(size_t stack_adjust) : stack_adjust_(stack_adjust) {}
  void Emit(Assembler* sp_asm) override;
 private:
  const size_t stack_adjust_;
};

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86Core(static_cast<int>(reg));
}

constexpr size_t kFramePointerSize = 4;

static constexpr size_t kNativeStackAlignment = 16;
static_assert(kNativeStackAlignment == kStackAlignment);

#define __ asm_.

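// Emits the method entry: spills the requested registers, reserves the rest
// of the frame and stores the ArtMethod* at the bottom. A sketch of the
// resulting layout (stack grows down):
//
//   [return address]   <- CFA (offset 4 on entry)
//   [spilled GPRs]     pushed in reverse spill-list order
//   [remaining frame]  reserved via addl ESP, -adjust
//   [ArtMethod*]       <- ESP (omitted for the @CriticalNative tail call case)
//
// Every ESP adjustment is mirrored in the CFI data so that stack walking and
// unwinding keep working inside JNI stubs.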
void X86JNIMacroAssembler::BuildFrame(size_t frame_size,
                                      ManagedRegister method_reg,
                                      ArrayRef<const ManagedRegister> spill_regs) {
  DCHECK_EQ(CodeSize(), 0U);  // Nothing emitted yet.
  cfi().SetCurrentCFAOffset(4);  // Return address on stack.
  if (frame_size == kFramePointerSize) {
    // For @CriticalNative tail call.
    CHECK(method_reg.IsNoRegister());
    CHECK(spill_regs.empty());
  } else if (method_reg.IsNoRegister()) {
    CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  } else {
    CHECK_ALIGNED(frame_size, kStackAlignment);
  }
  int gpr_count = 0;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ pushl(spill);
    gpr_count++;
    cfi().AdjustCFAOffset(kFramePointerSize);
    cfi().RelOffset(DWARFReg(spill), 0);
  }

  // Return address then method on stack.
  int32_t adjust = frame_size - gpr_count * kFramePointerSize -
                   kFramePointerSize /*return address*/ -
                   (method_reg.IsRegister() ? kFramePointerSize /*method*/ : 0u);
  if (adjust != 0) {
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
  if (method_reg.IsRegister()) {
    __ pushl(method_reg.AsX86().AsCpuRegister());
    cfi().AdjustCFAOffset(kFramePointerSize);
  }
  DCHECK_EQ(static_cast<size_t>(cfi().GetCurrentCFAOffset()), frame_size);
}

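// Mirror image of BuildFrame: releases the local area (including the
// ArtMethod* slot), pops the spilled registers back and returns. The
// RememberState()/RestoreState() pair keeps the CFI correct for any code
// emitted after this exit block.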
void X86JNIMacroAssembler::RemoveFrame(size_t frame_size,
                                       ArrayRef<const ManagedRegister> spill_regs,
                                       bool may_suspend ATTRIBUTE_UNUSED) {
  CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  cfi().RememberState();
  // -kFramePointerSize for ArtMethod*.
  int adjust = frame_size - spill_regs.size() * kFramePointerSize - kFramePointerSize;
  if (adjust != 0) {
    __ addl(ESP, Immediate(adjust));
    cfi().AdjustCFAOffset(-adjust);
  }
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ popl(spill);
    cfi().AdjustCFAOffset(-static_cast<int>(kFramePointerSize));
    cfi().Restore(DWARFReg(spill));
  }
  __ ret();
  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}

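// Stack size changes outside of frame setup/teardown go through these
// helpers so that the CFA offset tracked in the CFI stays in sync with ESP.
// Note the idiom: the stack is grown with addl ESP, -adjust rather than
// subl, so both directions are a single addl with opposite signs.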
void X86JNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
}

static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
    assembler->addl(ESP, Immediate(adjust));
    assembler->cfi().AdjustCFAOffset(-adjust);
  }
}

void X86JNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  DecreaseFrameSizeImpl(&asm_, adjust);
}

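// Stores a managed register to the stack frame, dispatching on the register
// kind: GPR (4 bytes), GPR pair (8 bytes as two movl), x87 (fstps/fstpl,
// which pop ST0) or XMM (movss/movsd depending on operand size).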
void X86JNIMacroAssembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  X86ManagedRegister src = msrc.AsX86();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(Address(ESP, offs), src.AsCpuRegister());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(Address(ESP, offs), src.AsRegisterPairLow());
    __ movl(Address(ESP, FrameOffset(offs.Int32Value() + 4)), src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    if (size == 4) {
      __ fstps(Address(ESP, offs));
    } else {
      __ fstpl(Address(ESP, offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      __ movss(Address(ESP, offs), src.AsXmmRegister());
    } else {
      __ movsd(Address(ESP, offs), src.AsXmmRegister());
    }
  }
}

void X86JNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  __ movl(Address(ESP, dest), src.AsCpuRegister());
}

void X86JNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  __ movl(Address(ESP, dest), src.AsCpuRegister());
}

void X86JNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm) {
  __ movl(Address(ESP, dest), Immediate(imm));
}

void X86JNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs, FrameOffset fr_offs) {
  Register scratch = GetScratchRegister();
  __ leal(scratch, Address(ESP, fr_offs));
  __ fs()->movl(Address::Absolute(thr_offs), scratch);
}

void X86JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  __ fs()->movl(Address::Absolute(thr_offs), ESP);
}

void X86JNIMacroAssembler::StoreSpanning(FrameOffset /*dst*/,
                                         ManagedRegister /*src*/,
                                         FrameOffset /*in_off*/) {
  UNIMPLEMENTED(FATAL);  // This case currently exists only for ARM.
}

void X86JNIMacroAssembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(dest.AsCpuRegister(), Address(ESP, src));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(dest.AsRegisterPairLow(), Address(ESP, src));
    __ movl(dest.AsRegisterPairHigh(), Address(ESP, FrameOffset(src.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      __ flds(Address(ESP, src));
    } else {
      __ fldl(Address(ESP, src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      __ movss(dest.AsXmmRegister(), Address(ESP, src));
    } else {
      __ movsd(dest.AsXmmRegister(), Address(ESP, src));
    }
  }
}

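// All *FromThread/*ToThread accesses use the %fs segment override: on x86,
// ART keeps the current Thread's data reachable via %fs, so a thread field
// at ThreadOffset32 `off` is simply fs:[off] (see GetCurrentThread below,
// which loads Thread::Current() the same way).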
void X86JNIMacroAssembler::LoadFromThread(ManagedRegister mdest, ThreadOffset32 src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    if (size == 1u) {
      __ fs()->movzxb(dest.AsCpuRegister(), Address::Absolute(src));
    } else {
      CHECK_EQ(4u, size);
      __ fs()->movl(dest.AsCpuRegister(), Address::Absolute(src));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ fs()->movl(dest.AsRegisterPairLow(), Address::Absolute(src));
    __ fs()->movl(dest.AsRegisterPairHigh(), Address::Absolute(ThreadOffset32(src.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      __ fs()->flds(Address::Absolute(src));
    } else {
      __ fs()->fldl(Address::Absolute(src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      __ fs()->movss(dest.AsXmmRegister(), Address::Absolute(src));
    } else {
      __ fs()->movsd(dest.AsXmmRegister(), Address::Absolute(src));
    }
  }
}

void X86JNIMacroAssembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ movl(dest.AsCpuRegister(), Address(ESP, src));
}

void X86JNIMacroAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
                                   bool unpoison_reference) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
  if (unpoison_reference) {
    __ MaybeUnpoisonHeapReference(dest.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::LoadRawPtr(ManagedRegister mdest,
                                      ManagedRegister base,
                                      Offset offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
}

void X86JNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs));
}

void X86JNIMacroAssembler::SignExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movsxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movzxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

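// Shuffles arguments into their native locations in two passes. The first
// pass handles register sources: the native x86 ABI passes everything on the
// stack, so registers are spilled, except for at most one
// register-to-register move of the "hidden argument". The second pass does
// the stack-to-stack copies; these must wait until all registers are
// spilled, because Copy() clobbers the scratch register ECX, which may
// itself hold an incoming argument during the first pass.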
void X86JNIMacroAssembler::MoveArguments(ArrayRef<ArgumentLocation> dests,
                                         ArrayRef<ArgumentLocation> srcs) {
  DCHECK_EQ(dests.size(), srcs.size());
  bool found_hidden_arg = false;
  for (size_t i = 0, arg_count = srcs.size(); i != arg_count; ++i) {
    const ArgumentLocation& src = srcs[i];
    const ArgumentLocation& dest = dests[i];
    DCHECK_EQ(src.GetSize(), dest.GetSize());
    if (src.IsRegister()) {
      if (UNLIKELY(dest.IsRegister())) {
        // Native ABI has only stack arguments but we may pass one "hidden arg" in register.
        CHECK(!found_hidden_arg);
        found_hidden_arg = true;
        DCHECK(
            !dest.GetRegister().Equals(X86ManagedRegister::FromCpuRegister(GetScratchRegister())));
        Move(dest.GetRegister(), src.GetRegister(), dest.GetSize());
      } else {
        Store(dest.GetFrameOffset(), src.GetRegister(), dest.GetSize());
      }
    } else {
      // Delay copying until we have spilled all registers, including the scratch register ECX.
    }
  }
  for (size_t i = 0, arg_count = srcs.size(); i != arg_count; ++i) {
    const ArgumentLocation& src = srcs[i];
    const ArgumentLocation& dest = dests[i];
    DCHECK_EQ(src.GetSize(), dest.GetSize());
    if (!src.IsRegister()) {
      DCHECK(!dest.IsRegister());
      Copy(dest.GetFrameOffset(), src.GetFrameOffset(), dest.GetSize());
    }
  }
}

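// Register-to-register move. There is no direct x87 -> XMM instruction, so
// that case bounces the value through a temporary 16-byte stack slot:
// fstps/fstpl pops ST0 to memory and movss/movsd reloads it into XMM.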
void X86JNIMacroAssembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  DCHECK(!mdest.Equals(X86ManagedRegister::FromCpuRegister(GetScratchRegister())));
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (!dest.Equals(src)) {
    if (dest.IsCpuRegister() && src.IsCpuRegister()) {
      __ movl(dest.AsCpuRegister(), src.AsCpuRegister());
    } else if (src.IsX87Register() && dest.IsXmmRegister()) {
      // Pass via stack and pop X87 register.
      IncreaseFrameSize(16);
      if (size == 4) {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstps(Address(ESP, 0));
        __ movss(dest.AsXmmRegister(), Address(ESP, 0));
      } else {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstpl(Address(ESP, 0));
        __ movsd(dest.AsXmmRegister(), Address(ESP, 0));
      }
      DecreaseFrameSize(16);
    } else {
      // TODO: x87, SSE
      UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
    }
  }
}

void X86JNIMacroAssembler::CopyRef(FrameOffset dest, FrameOffset src) {
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, src));
  __ movl(Address(ESP, dest), scratch);
}

void X86JNIMacroAssembler::CopyRef(FrameOffset dest,
                                   ManagedRegister base,
                                   MemberOffset offs,
                                   bool unpoison_reference) {
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(base.AsX86().AsCpuRegister(), offs));
  if (unpoison_reference) {
    __ MaybeUnpoisonHeapReference(scratch);
  }
  __ movl(Address(ESP, dest), scratch);
}

void X86JNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset32 thr_offs) {
  Register scratch = GetScratchRegister();
  __ fs()->movl(scratch, Address::Absolute(thr_offs));
  __ movl(Address(ESP, fr_offs), scratch);
}

void X86JNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs,
                                              FrameOffset fr_offs,
                                              ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  Load(scratch, fr_offs, 4);
  __ fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
}

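// The Copy() family moves memory to memory. Overloads with a usable scratch
// GPR copy through it a word at a time; the overloads asserting
// scratch.IsNoRegister() instead exploit x86's memory-operand pushl/popl to
// copy without touching any register.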
void X86JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size) {
  DCHECK(size == 4 || size == 8) << size;
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, src));
  __ movl(Address(ESP, dest), scratch);
  if (size == 8) {
    __ movl(scratch, Address(ESP, FrameOffset(src.Int32Value() + 4)));
    __ movl(Address(ESP, FrameOffset(dest.Int32Value() + 4)), scratch);
  }
}

void X86JNIMacroAssembler::Copy(FrameOffset /*dst*/,
                                ManagedRegister /*src_base*/,
                                Offset /*src_offset*/,
                                ManagedRegister /*scratch*/,
                                size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

void X86JNIMacroAssembler::Copy(ManagedRegister dest_base,
                                Offset dest_offset,
                                FrameOffset src,
                                ManagedRegister scratch,
                                size_t size) {
  CHECK(scratch.IsNoRegister());
  CHECK_EQ(size, 4u);
  __ pushl(Address(ESP, src));
  __ popl(Address(dest_base.AsX86().AsCpuRegister(), dest_offset));
}

void X86JNIMacroAssembler::Copy(FrameOffset dest,
                                FrameOffset src_base,
                                Offset src_offset,
                                ManagedRegister mscratch,
                                size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  __ movl(scratch, Address(ESP, src_base));
  __ movl(scratch, Address(scratch, src_offset));
  __ movl(Address(ESP, dest), scratch);
}

void X86JNIMacroAssembler::Copy(ManagedRegister dest,
                                Offset dest_offset,
                                ManagedRegister src,
                                Offset src_offset,
                                ManagedRegister scratch,
                                size_t size) {
  CHECK_EQ(size, 4u);
  CHECK(scratch.IsNoRegister());
  __ pushl(Address(src.AsX86().AsCpuRegister(), src_offset));
  __ popl(Address(dest.AsX86().AsCpuRegister(), dest_offset));
}

void X86JNIMacroAssembler::Copy(FrameOffset dest,
                                Offset dest_offset,
                                FrameOffset src,
                                Offset src_offset,
                                ManagedRegister mscratch,
                                size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  CHECK_EQ(dest.Int32Value(), src.Int32Value());
  __ movl(scratch, Address(ESP, src));
  __ pushl(Address(scratch, src_offset));
  __ popl(Address(scratch, dest_offset));
}

void X86JNIMacroAssembler::MemoryBarrier(ManagedRegister) {
  __ mfence();
}

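// Converts a spilled reference into a jobject argument, i.e. the address of
// the stack slot holding the reference. With null_allowed, a null reference
// must yield a null jobject rather than a pointer to a null slot, hence the
// testl/j(kZero) guarding the leal.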
void X86JNIMacroAssembler::CreateJObject(ManagedRegister mout_reg,
                                         FrameOffset spilled_reference_offset,
                                         ManagedRegister min_reg,
                                         bool null_allowed) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    if (!out_reg.Equals(in_reg)) {
      __ xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    __ testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    __ j(kZero, &null_arg);
    __ leal(out_reg.AsCpuRegister(), Address(ESP, spilled_reference_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(out_reg.AsCpuRegister(), Address(ESP, spilled_reference_offset));
  }
}

void X86JNIMacroAssembler::CreateJObject(FrameOffset out_off,
                                         FrameOffset spilled_reference_offset,
                                         bool null_allowed) {
  Register scratch = GetScratchRegister();
  if (null_allowed) {
    Label null_arg;
    __ movl(scratch, Address(ESP, spilled_reference_offset));
    __ testl(scratch, scratch);
    __ j(kZero, &null_arg);
    __ leal(scratch, Address(ESP, spilled_reference_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(scratch, Address(ESP, spilled_reference_offset));
  }
  __ movl(Address(ESP, out_off), scratch);
}

void X86JNIMacroAssembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void X86JNIMacroAssembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void X86JNIMacroAssembler::Jump(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ jmp(Address(base.AsCpuRegister(), offset.Int32Value()));
}

void X86JNIMacroAssembler::Call(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call.
}

void X86JNIMacroAssembler::Call(FrameOffset base, Offset offset) {
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, base));
  __ call(Address(scratch, offset));
}

void X86JNIMacroAssembler::CallFromThread(ThreadOffset32 offset) {
  __ fs()->call(Address::Absolute(offset));
}

void X86JNIMacroAssembler::GetCurrentThread(ManagedRegister dest) {
  __ fs()->movl(dest.AsX86().AsCpuRegister(),
                Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
}

void X86JNIMacroAssembler::GetCurrentThread(FrameOffset offset) {
  Register scratch = GetScratchRegister();
  __ fs()->movl(scratch, Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
  __ movl(Address(ESP, offset), scratch);
}

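// Emits the pending-exception check: compares the thread-local exception
// field against null and branches to a shared slow path that is queued here
// and emitted out of line (see X86ExceptionSlowPath::Emit below).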
void X86JNIMacroAssembler::ExceptionPoll(size_t stack_adjust) {
  X86ExceptionSlowPath* slow = new (__ GetAllocator()) X86ExceptionSlowPath(stack_adjust);
  __ GetBuffer()->EnqueueSlowPath(slow);
  __ fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()), Immediate(0));
  __ j(kNotEqual, slow->Entry());
}

std::unique_ptr<JNIMacroLabel> X86JNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new X86JNIMacroLabel());
}

void X86JNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ jmp(X86JNIMacroLabel::Cast(label)->AsX86());
}

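// Tests the thread-local is_gc_marking flag and branches on the chosen
// condition; JNI stubs use this to take read-barrier-related slow paths
// only while the concurrent GC is actively marking.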
void X86JNIMacroAssembler::TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) {
  CHECK(label != nullptr);

  art::x86::Condition x86_cond;
  switch (cond) {
    case JNIMacroUnaryCondition::kZero:
      x86_cond = art::x86::kZero;
      break;
    case JNIMacroUnaryCondition::kNotZero:
      x86_cond = art::x86::kNotZero;
      break;
    default:
      LOG(FATAL) << "Not implemented condition: " << static_cast<int>(cond);
      UNREACHABLE();
  }

  // CMP self->tls32_.is_gc_marking, 0
  // Jcc <Offset>
  DCHECK_EQ(Thread::IsGcMarkingSize(), 4u);
  __ fs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86PointerSize>()), Immediate(0));
  __ j(x86_cond, X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ Bind(X86JNIMacroLabel::Cast(label)->AsX86());
}

#undef __

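// Out-of-line exception delivery: restore ESP if the poll site had extra
// stack adjustment, load the pending exception into EAX as the sole
// argument, and call the pDeliverException entrypoint, which never returns
// (the trailing int3 documents and enforces that).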
void X86ExceptionSlowPath::Emit(Assembler* sasm) {
  X86Assembler* sp_asm = down_cast<X86Assembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  // Note: the return value is dead.
  if (stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSizeImpl(sp_asm, stack_adjust_);
  }
  // Pass the exception as an argument in EAX.
  __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()));
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, pDeliverException)));
  // This call should never return.
  __ int3();
#undef __
}

}  // namespace x86
}  // namespace art