/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_x86.h"

#include "base/casts.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "lock_word.h"
#include "thread.h"
#include "utils/assembler.h"

namespace art HIDDEN {
namespace x86 {

static Register GetScratchRegister() {
  // ECX is an argument register on entry and gets spilled in BuildFrame().
  // After that, we can use it as a scratch register.
  return ECX;
}

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86Core(static_cast<int>(reg));
}

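// Stack slots and pointers are 4 bytes wide on x86.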
constexpr size_t kFramePointerSize = 4;

static constexpr size_t kNativeStackAlignment = 16;
static_assert(kNativeStackAlignment == kStackAlignment);

#define __ asm_.

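// Emits the managed-method prologue: pushes the callee-save GPRs (recording each
// spill in the CFI), extends the stack to reach `frame_size`, and pushes the
// ArtMethod* so that it ends up at the bottom of the frame, i.e. at ESP. For the
// @CriticalNative tail call case (frame_size == kFramePointerSize) the frame is
// just the return address and no instructions are emitted.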
void X86JNIMacroAssembler::BuildFrame(size_t frame_size,
                                      ManagedRegister method_reg,
                                      ArrayRef<const ManagedRegister> spill_regs) {
  DCHECK_EQ(CodeSize(), 0U);  // Nothing emitted yet.
  cfi().SetCurrentCFAOffset(4);  // Return address on stack.
  if (frame_size == kFramePointerSize) {
    // For @CriticalNative tail call.
    CHECK(method_reg.IsNoRegister());
    CHECK(spill_regs.empty());
  } else if (method_reg.IsNoRegister()) {
    CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  } else {
    CHECK_ALIGNED(frame_size, kStackAlignment);
  }
  int gpr_count = 0;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ pushl(spill);
    gpr_count++;
    cfi().AdjustCFAOffset(kFramePointerSize);
    cfi().RelOffset(DWARFReg(spill), 0);
  }

  // return address then method on stack.
  int32_t adjust = frame_size - gpr_count * kFramePointerSize -
                   kFramePointerSize /*return address*/ -
                   (method_reg.IsRegister() ? kFramePointerSize /*method*/ : 0u);
  if (adjust != 0) {
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
  if (method_reg.IsRegister()) {
    __ pushl(method_reg.AsX86().AsCpuRegister());
    cfi().AdjustCFAOffset(kFramePointerSize);
  }
  DCHECK_EQ(static_cast<size_t>(cfi().GetCurrentCFAOffset()), frame_size);
}

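// Mirrors BuildFrame(): releases the stack frame (including the ArtMethod* slot),
// pops the callee-save GPRs and returns. CFI state is remembered and restored so
// that unwind information stays correct for any code emitted after the `ret`.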
void X86JNIMacroAssembler::RemoveFrame(size_t frame_size,
                                       ArrayRef<const ManagedRegister> spill_regs,
                                       bool may_suspend ATTRIBUTE_UNUSED) {
  CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  cfi().RememberState();
  // -kFramePointerSize for ArtMethod*.
  int adjust = frame_size - spill_regs.size() * kFramePointerSize - kFramePointerSize;
  if (adjust != 0) {
    __ addl(ESP, Immediate(adjust));
    cfi().AdjustCFAOffset(-adjust);
  }
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ popl(spill);
    cfi().AdjustCFAOffset(-static_cast<int>(kFramePointerSize));
    cfi().Restore(DWARFReg(spill));
  }
  __ ret();
  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}

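// Both adjustments below are emitted as `addl ESP, imm32`; growing the frame
// simply uses a negated immediate, which is equivalent to `subl`.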
void X86JNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
}

static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
    assembler->addl(ESP, Immediate(adjust));
    assembler->cfi().AdjustCFAOffset(-adjust);
  }
}

ManagedRegister X86JNIMacroAssembler::CoreRegisterWithSize(ManagedRegister src, size_t size) {
  DCHECK(src.AsX86().IsCpuRegister());
  DCHECK_EQ(size, 4u);
  return src;
}

void X86JNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  DecreaseFrameSizeImpl(&asm_, adjust);
}

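// Stores dispatch on the kind of the source register: `movl` for a GPR, two
// `movl`s for a register pair, `fstps`/`fstpl` for the x87 stack top and
// `movss`/`movsd` for an XMM register.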
void X86JNIMacroAssembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  Store(X86ManagedRegister::FromCpuRegister(ESP), MemberOffset(offs.Int32Value()), msrc, size);
}

void X86JNIMacroAssembler::Store(ManagedRegister mbase,
                                 MemberOffset offs,
                                 ManagedRegister msrc,
                                 size_t size) {
  X86ManagedRegister base = mbase.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(Address(base.AsCpuRegister(), offs), src.AsCpuRegister());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(Address(base.AsCpuRegister(), offs), src.AsRegisterPairLow());
    __ movl(Address(base.AsCpuRegister(), FrameOffset(offs.Int32Value() + 4)),
            src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    if (size == 4) {
      __ fstps(Address(base.AsCpuRegister(), offs));
    } else {
      __ fstpl(Address(base.AsCpuRegister(), offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      __ movss(Address(base.AsCpuRegister(), offs), src.AsXmmRegister());
    } else {
      __ movsd(Address(base.AsCpuRegister(), offs), src.AsXmmRegister());
    }
  }
}

void X86JNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  __ movl(Address(ESP, dest), src.AsCpuRegister());
}

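// Stores the stack pointer into the given Thread field (thread-local Thread
// fields are addressed through the FS segment). When `tag_sp` is set, the
// *stored* value has bit 1 set while ESP itself is left untouched; the stack
// pointer is at least 4-byte aligned, so the low two bits are free to carry
// the tag.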
void X86JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs, bool tag_sp) {
  if (tag_sp) {
    // There is no free register; store ECX's contents on the stack and restore them afterwards.
    Register scratch = ECX;
    __ movl(Address(ESP, -32), scratch);
    __ movl(scratch, ESP);
    __ orl(scratch, Immediate(0x2));
    __ fs()->movl(Address::Absolute(thr_offs), scratch);
    __ movl(scratch, Address(ESP, -32));
  } else {
    __ fs()->movl(Address::Absolute(thr_offs), ESP);
  }
}

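// Loads mirror the stores above, dispatching on the kind of the destination
// register.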
void X86JNIMacroAssembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  Load(mdest, X86ManagedRegister::FromCpuRegister(ESP), MemberOffset(src.Int32Value()), size);
}

void X86JNIMacroAssembler::Load(ManagedRegister mdest,
                                ManagedRegister mbase,
                                MemberOffset offs,
                                size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister base = mbase.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(dest.AsCpuRegister(), Address(base.AsCpuRegister(), offs));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(dest.AsRegisterPairLow(), Address(base.AsCpuRegister(), offs));
    __ movl(dest.AsRegisterPairHigh(),
            Address(base.AsCpuRegister(), FrameOffset(offs.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      __ flds(Address(base.AsCpuRegister(), offs));
    } else {
      __ fldl(Address(base.AsCpuRegister(), offs));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      __ movss(dest.AsXmmRegister(), Address(base.AsCpuRegister(), offs));
    } else {
      __ movsd(dest.AsXmmRegister(), Address(base.AsCpuRegister(), offs));
    }
  }
}

void X86JNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs));
}

void X86JNIMacroAssembler::SignExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movsxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movzxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

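// Moves the native call arguments into place in two passes. The first pass
// stores register arguments to their stack slots, converting spilled
// references to `jobject` along the way; this also frees up the scratch
// register ECX. The second pass then copies the remaining stack-to-stack
// arguments, which may need ECX. Null is allowed for every reference argument
// except the first one (`null_allowed = i != 0u`).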
void X86JNIMacroAssembler::MoveArguments(ArrayRef<ArgumentLocation> dests,
                                         ArrayRef<ArgumentLocation> srcs,
                                         ArrayRef<FrameOffset> refs) {
  size_t arg_count = dests.size();
  DCHECK_EQ(arg_count, srcs.size());
  DCHECK_EQ(arg_count, refs.size());

  // Store register args to stack slots. Convert processed references to `jobject`.
  bool found_hidden_arg = false;
  for (size_t i = 0; i != arg_count; ++i) {
    const ArgumentLocation& src = srcs[i];
    const ArgumentLocation& dest = dests[i];
    const FrameOffset ref = refs[i];
    DCHECK_EQ(src.GetSize(), dest.GetSize());  // Even for references.
    if (src.IsRegister()) {
      if (UNLIKELY(dest.IsRegister())) {
        if (dest.GetRegister().Equals(src.GetRegister())) {
          // JNI compiler sometimes adds a no-op move.
          continue;
        }
        // Native ABI has only stack arguments but we may pass one "hidden arg" in register.
        CHECK(!found_hidden_arg);
        found_hidden_arg = true;
        DCHECK_EQ(ref, kInvalidReferenceOffset);
        DCHECK(
            !dest.GetRegister().Equals(X86ManagedRegister::FromCpuRegister(GetScratchRegister())));
        Move(dest.GetRegister(), src.GetRegister(), dest.GetSize());
      } else {
        if (ref != kInvalidReferenceOffset) {
          // Note: We can clobber `src` here as the register cannot hold more than one argument.
          //       This overload of `CreateJObject()` currently does not use the scratch
          //       register ECX, so this shall not clobber another argument.
          CreateJObject(src.GetRegister(), ref, src.GetRegister(), /*null_allowed=*/ i != 0u);
        }
        Store(dest.GetFrameOffset(), src.GetRegister(), dest.GetSize());
      }
    } else {
      // Delay copying until we have spilled all registers, including the scratch register ECX.
    }
  }

  // Copy incoming stack args. Convert processed references to `jobject`.
  for (size_t i = 0; i != arg_count; ++i) {
    const ArgumentLocation& src = srcs[i];
    const ArgumentLocation& dest = dests[i];
    const FrameOffset ref = refs[i];
    DCHECK_EQ(src.GetSize(), dest.GetSize());  // Even for references.
    if (!src.IsRegister()) {
      DCHECK(!dest.IsRegister());
      if (ref != kInvalidReferenceOffset) {
        DCHECK_EQ(srcs[i].GetFrameOffset(), refs[i]);
        CreateJObject(dest.GetFrameOffset(), ref, /*null_allowed=*/ i != 0u);
      } else {
        Copy(dest.GetFrameOffset(), src.GetFrameOffset(), dest.GetSize());
      }
    }
  }
}

void X86JNIMacroAssembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  DCHECK(!mdest.Equals(X86ManagedRegister::FromCpuRegister(GetScratchRegister())));
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (!dest.Equals(src)) {
    if (dest.IsCpuRegister() && src.IsCpuRegister()) {
      __ movl(dest.AsCpuRegister(), src.AsCpuRegister());
    } else if (src.IsX87Register() && dest.IsXmmRegister()) {
      // Pass via stack and pop X87 register
      IncreaseFrameSize(16);
      if (size == 4) {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstps(Address(ESP, 0));
        __ movss(dest.AsXmmRegister(), Address(ESP, 0));
      } else {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstpl(Address(ESP, 0));
        __ movsd(dest.AsXmmRegister(), Address(ESP, 0));
      }
      DecreaseFrameSize(16);
    } else {
      // TODO: x87, SSE
      UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
    }
  }
}

void X86JNIMacroAssembler::Move(ManagedRegister mdest, size_t value) {
  X86ManagedRegister dest = mdest.AsX86();
  __ movl(dest.AsCpuRegister(), Immediate(value));
}

void X86JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size) {
  DCHECK(size == 4 || size == 8) << size;
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, src));
  __ movl(Address(ESP, dest), scratch);
  if (size == 8) {
    __ movl(scratch, Address(ESP, FrameOffset(src.Int32Value() + 4)));
    __ movl(Address(ESP, FrameOffset(dest.Int32Value() + 4)), scratch);
  }
}

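// Converts a reference spilled at `spilled_reference_offset` into a `jobject`:
// a null reference yields a null `jobject`, a non-null reference yields the
// address of the stack slot holding the spilled reference.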
void X86JNIMacroAssembler::CreateJObject(ManagedRegister mout_reg,
                                         FrameOffset spilled_reference_offset,
                                         ManagedRegister min_reg,
                                         bool null_allowed) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    if (!out_reg.Equals(in_reg)) {
      __ xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    __ testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    __ j(kZero, &null_arg);
    __ leal(out_reg.AsCpuRegister(), Address(ESP, spilled_reference_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(out_reg.AsCpuRegister(), Address(ESP, spilled_reference_offset));
  }
}

void X86JNIMacroAssembler::CreateJObject(FrameOffset out_off,
                                         FrameOffset spilled_reference_offset,
                                         bool null_allowed) {
  Register scratch = GetScratchRegister();
  if (null_allowed) {
    Label null_arg;
    __ movl(scratch, Address(ESP, spilled_reference_offset));
    __ testl(scratch, scratch);
    __ j(kZero, &null_arg);
    __ leal(scratch, Address(ESP, spilled_reference_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(scratch, Address(ESP, spilled_reference_offset));
  }
  __ movl(Address(ESP, out_off), scratch);
}

void X86JNIMacroAssembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}

void X86JNIMacroAssembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}

void X86JNIMacroAssembler::Jump(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ jmp(Address(base.AsCpuRegister(), offset.Int32Value()));
}

void X86JNIMacroAssembler::Call(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call
}

void X86JNIMacroAssembler::CallFromThread(ThreadOffset32 offset) {
  __ fs()->call(Address::Absolute(offset));
}

void X86JNIMacroAssembler::GetCurrentThread(ManagedRegister dest) {
  __ fs()->movl(dest.AsX86().AsCpuRegister(),
                Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
}

void X86JNIMacroAssembler::GetCurrentThread(FrameOffset offset) {
  Register scratch = GetScratchRegister();
  __ fs()->movl(scratch, Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
  __ movl(Address(ESP, offset), scratch);
}

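// Fast-path transition from Runnable to Native: a single `lock cmpxchg` tries
// to replace the thread's state-and-flags word, expected to be Runnable with
// no flags (i.e. zero), with the Native state. If the word held anything else
// the CAS fails, ZF is clear and we branch to the slow-path `label`; on
// success the mutator lock is marked as released by clearing the thread's
// held-mutexes entry for it.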
void X86JNIMacroAssembler::TryToTransitionFromRunnableToNative(
    JNIMacroLabel* label, ArrayRef<const ManagedRegister> scratch_regs) {
  constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative);
  constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable);
  constexpr ThreadOffset32 thread_flags_offset = Thread::ThreadFlagsOffset<kX86PointerSize>();
  constexpr ThreadOffset32 thread_held_mutex_mutator_lock_offset =
      Thread::HeldMutexOffset<kX86PointerSize>(kMutatorLock);

  // We need to preserve managed argument EAX.
  DCHECK_GE(scratch_regs.size(), 2u);
  Register saved_eax = scratch_regs[0].AsX86().AsCpuRegister();
  Register scratch = scratch_regs[1].AsX86().AsCpuRegister();

  // CAS release, old_value = kRunnableStateValue, new_value = kNativeStateValue, no flags.
  __ movl(saved_eax, EAX);  // Save EAX.
  static_assert(kRunnableStateValue == 0u);
  __ xorl(EAX, EAX);
  __ movl(scratch, Immediate(kNativeStateValue));
  __ fs()->LockCmpxchgl(Address::Absolute(thread_flags_offset.Uint32Value()), scratch);
  // LOCK CMPXCHG has full barrier semantics, so we don't need barriers here.
  __ movl(EAX, saved_eax);  // Restore EAX; MOV does not change flags.
  // If any flags are set, go to the slow path.
  __ j(kNotZero, X86JNIMacroLabel::Cast(label)->AsX86());

  // Clear `self->tlsPtr_.held_mutexes[kMutatorLock]`.
  __ fs()->movl(Address::Absolute(thread_held_mutex_mutator_lock_offset.Uint32Value()),
                Immediate(0));
}

void X86JNIMacroAssembler::TryToTransitionFromNativeToRunnable(
    JNIMacroLabel* label,
    ArrayRef<const ManagedRegister> scratch_regs,
    ManagedRegister return_reg) {
  constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative);
  constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable);
  constexpr ThreadOffset32 thread_flags_offset = Thread::ThreadFlagsOffset<kX86PointerSize>();
  constexpr ThreadOffset32 thread_held_mutex_mutator_lock_offset =
      Thread::HeldMutexOffset<kX86PointerSize>(kMutatorLock);
  constexpr ThreadOffset32 thread_mutator_lock_offset =
      Thread::MutatorLockOffset<kX86PointerSize>();

  size_t scratch_index = 0u;
  auto get_scratch_reg = [&]() {
    while (true) {
      DCHECK_LT(scratch_index, scratch_regs.size());
      X86ManagedRegister scratch_reg = scratch_regs[scratch_index].AsX86();
      ++scratch_index;
      DCHECK(!scratch_reg.Overlaps(return_reg.AsX86()));
      if (scratch_reg.AsCpuRegister() != EAX) {
        return scratch_reg.AsCpuRegister();
      }
    }
  };
  Register scratch = get_scratch_reg();
  bool preserve_eax = return_reg.AsX86().Overlaps(X86ManagedRegister::FromCpuRegister(EAX));
  Register saved_eax = preserve_eax ? get_scratch_reg() : kNoRegister;

  // CAS acquire, old_value = kNativeStateValue, new_value = kRunnableStateValue, no flags.
  if (preserve_eax) {
    __ movl(saved_eax, EAX);  // Save EAX.
  }
  __ movl(EAX, Immediate(kNativeStateValue));
  static_assert(kRunnableStateValue == 0u);
  __ xorl(scratch, scratch);
  __ fs()->LockCmpxchgl(Address::Absolute(thread_flags_offset.Uint32Value()), scratch);
  // LOCK CMPXCHG has full barrier semantics, so we don't need barriers here.
  if (preserve_eax) {
    __ movl(EAX, saved_eax);  // Restore EAX; MOV does not change flags.
  }
  // If any flags are set, or the state is not Native, go to the slow path.
  // (While the thread can theoretically transition between different Suspended states,
  // it would be very unexpected to see a state other than Native at this point.)
  __ j(kNotZero, X86JNIMacroLabel::Cast(label)->AsX86());

  // Set `self->tlsPtr_.held_mutexes[kMutatorLock]` to the mutator lock.
  __ fs()->movl(scratch, Address::Absolute(thread_mutator_lock_offset.Uint32Value()));
  __ fs()->movl(Address::Absolute(thread_held_mutex_mutator_lock_offset.Uint32Value()),
                scratch);
}

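// Branches to `label` if any suspend or checkpoint request flag is set in the
// current thread's state-and-flags word.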
void X86JNIMacroAssembler::SuspendCheck(JNIMacroLabel* label) {
  __ fs()->testl(Address::Absolute(Thread::ThreadFlagsOffset<kX86PointerSize>()),
                 Immediate(Thread::SuspendOrCheckpointRequestFlags()));
  __ j(kNotZero, X86JNIMacroLabel::Cast(label)->AsX86());
}

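// Branches to `label` if the current thread has a pending exception.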
void X86JNIMacroAssembler::ExceptionPoll(JNIMacroLabel* label) {
  __ fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()), Immediate(0));
  __ j(kNotEqual, X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::DeliverPendingException() {
  // Pass the exception as an argument in EAX.
  __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()));
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, pDeliverException)));
  // This call should never return.
  __ int3();
}

std::unique_ptr<JNIMacroLabel> X86JNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new X86JNIMacroLabel());
}

void X86JNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ jmp(X86JNIMacroLabel::Cast(label)->AsX86());
}

static Condition UnaryConditionToX86Condition(JNIMacroUnaryCondition cond) {
  switch (cond) {
    case JNIMacroUnaryCondition::kZero:
      return kZero;
    case JNIMacroUnaryCondition::kNotZero:
      return kNotZero;
    default:
      LOG(FATAL) << "Not implemented condition: " << static_cast<int>(cond);
      UNREACHABLE();
  }
}

void X86JNIMacroAssembler::TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) {
  CHECK(label != nullptr);

  // CMP self->tls32_.is_gc_marking, 0
  // Jcc <Offset>
  DCHECK_EQ(Thread::IsGcMarkingSize(), 4u);
  __ fs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86PointerSize>()), Immediate(0));
  __ j(UnaryConditionToX86Condition(cond), X86JNIMacroLabel::Cast(label)->AsX86());
}

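// Tests the Baker read barrier mark bit in the object's lock word and branches
// to `label` if the given condition holds.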
void X86JNIMacroAssembler::TestMarkBit(ManagedRegister mref,
                                       JNIMacroLabel* label,
                                       JNIMacroUnaryCondition cond) {
  DCHECK(kUseBakerReadBarrier);
  Register ref = mref.AsX86().AsCpuRegister();
  static_assert(LockWord::kMarkBitStateSize == 1u);
  __ testl(Address(ref, mirror::Object::MonitorOffset().SizeValue()),
           Immediate(LockWord::kMarkBitStateMaskShifted));
  __ j(UnaryConditionToX86Condition(cond), X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) {
  __ cmpb(Address::Absolute(address), Immediate(0));
  __ j(kNotZero, X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ Bind(X86JNIMacroLabel::Cast(label)->AsX86());
}

#undef __

}  // namespace x86
}  // namespace art