/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_x86.h"

#include "base/casts.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "lock_word.h"
#include "thread.h"
#include "utils/assembler.h"

namespace art HIDDEN {
namespace x86 {

static Register GetScratchRegister() {
  // ECX is an argument register on entry and gets spilled in BuildFrame().
  // After that, we can use it as a scratch register.
  return ECX;
}

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86Core(static_cast<int>(reg));
}

constexpr size_t kFramePointerSize = 4;

static constexpr size_t kNativeStackAlignment = 16;
static_assert(kNativeStackAlignment == kStackAlignment);

#define __ asm_.

void X86JNIMacroAssembler::BuildFrame(size_t frame_size,
                                      ManagedRegister method_reg,
                                      ArrayRef<const ManagedRegister> spill_regs) {
  DCHECK_EQ(CodeSize(), 0U);  // Nothing emitted yet.
  cfi().SetCurrentCFAOffset(4);  // Return address on stack.
  if (frame_size == kFramePointerSize) {
    // For @CriticalNative tail call.
    CHECK(method_reg.IsNoRegister());
    CHECK(spill_regs.empty());
  } else if (method_reg.IsNoRegister()) {
    CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  } else {
    CHECK_ALIGNED(frame_size, kStackAlignment);
  }
  int gpr_count = 0;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ pushl(spill);
    gpr_count++;
    cfi().AdjustCFAOffset(kFramePointerSize);
    cfi().RelOffset(DWARFReg(spill), 0);
  }

  // The return address and the method slot (if any) are accounted for separately;
  // adjust ESP for the remainder of the frame.
  int32_t adjust = frame_size - gpr_count * kFramePointerSize -
                   kFramePointerSize /*return address*/ -
                   (method_reg.IsRegister() ? kFramePointerSize /*method*/ : 0u);
  if (adjust != 0) {
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
  if (method_reg.IsRegister()) {
    __ pushl(method_reg.AsX86().AsCpuRegister());
    cfi().AdjustCFAOffset(kFramePointerSize);
  }
  DCHECK_EQ(static_cast<size_t>(cfi().GetCurrentCFAOffset()), frame_size);
}
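
// Note: a sketch of the frame laid out by BuildFrame() for the regular (non-tail-call) case,
// derived from the code above: the return address sits at the highest address, followed by the
// pushed callee-save spills, then the ESP adjustment for outgoing arguments and padding, and
// finally the ArtMethod* at the new ESP (when `method_reg` is a register).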

void X86JNIMacroAssembler::RemoveFrame(size_t frame_size,
                                       ArrayRef<const ManagedRegister> spill_regs,
                                       bool may_suspend ATTRIBUTE_UNUSED) {
  CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  cfi().RememberState();
  // -kFramePointerSize for ArtMethod*.
  int adjust = frame_size - spill_regs.size() * kFramePointerSize - kFramePointerSize;
  if (adjust != 0) {
    __ addl(ESP, Immediate(adjust));
    cfi().AdjustCFAOffset(-adjust);
  }
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ popl(spill);
    cfi().AdjustCFAOffset(-static_cast<int>(kFramePointerSize));
    cfi().Restore(DWARFReg(spill));
  }
  __ ret();
  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}

void X86JNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
}

static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
    assembler->addl(ESP, Immediate(adjust));
    assembler->cfi().AdjustCFAOffset(-adjust);
  }
}

ManagedRegister X86JNIMacroAssembler::CoreRegisterWithSize(ManagedRegister src, size_t size) {
  DCHECK(src.AsX86().IsCpuRegister());
  DCHECK_EQ(size, 4u);
  return src;
}

void X86JNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  DecreaseFrameSizeImpl(&asm_, adjust);
}

void X86JNIMacroAssembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  Store(X86ManagedRegister::FromCpuRegister(ESP), MemberOffset(offs.Int32Value()), msrc, size);
}

void X86JNIMacroAssembler::Store(ManagedRegister mbase,
                                 MemberOffset offs,
                                 ManagedRegister msrc,
                                 size_t size) {
  X86ManagedRegister base = mbase.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(Address(base.AsCpuRegister(), offs), src.AsCpuRegister());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(Address(base.AsCpuRegister(), offs), src.AsRegisterPairLow());
    __ movl(Address(base.AsCpuRegister(), FrameOffset(offs.Int32Value() + 4)),
            src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    if (size == 4) {
      __ fstps(Address(base.AsCpuRegister(), offs));
    } else {
      __ fstpl(Address(base.AsCpuRegister(), offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      __ movss(Address(base.AsCpuRegister(), offs), src.AsXmmRegister());
    } else {
      __ movsd(Address(base.AsCpuRegister(), offs), src.AsXmmRegister());
    }
  }
}

void X86JNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  __ movl(Address(ESP, dest), src.AsCpuRegister());
}

void X86JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs, bool tag_sp) {
  if (tag_sp) {
    // There is no free scratch register here, so save ECX on the stack and restore it afterwards.
    Register scratch = ECX;
    __ movl(Address(ESP, -32), scratch);
    __ movl(scratch, ESP);
    __ orl(scratch, Immediate(0x2));
    __ fs()->movl(Address::Absolute(thr_offs), scratch);
    __ movl(scratch, Address(ESP, -32));
  } else {
    __ fs()->movl(Address::Absolute(thr_offs), ESP);
  }
}
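
// Note: when `tag_sp` is true, the value published to the thread-local slot above is ESP with
// bit 1 (0x2) set, so readers of that slot can distinguish a tagged JNI frame from an untagged
// one; the meaning of the tag itself is defined by the code reading the slot, not here.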

void X86JNIMacroAssembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  Load(mdest, X86ManagedRegister::FromCpuRegister(ESP), MemberOffset(src.Int32Value()), size);
}

void X86JNIMacroAssembler::Load(ManagedRegister mdest,
                                ManagedRegister mbase,
                                MemberOffset offs,
                                size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister base = mbase.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(dest.AsCpuRegister(), Address(base.AsCpuRegister(), offs));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(dest.AsRegisterPairLow(), Address(base.AsCpuRegister(), offs));
    __ movl(dest.AsRegisterPairHigh(),
            Address(base.AsCpuRegister(), FrameOffset(offs.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      __ flds(Address(base.AsCpuRegister(), offs));
    } else {
      __ fldl(Address(base.AsCpuRegister(), offs));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      __ movss(dest.AsXmmRegister(), Address(base.AsCpuRegister(), offs));
    } else {
      __ movsd(dest.AsXmmRegister(), Address(base.AsCpuRegister(), offs));
    }
  }
}

void X86JNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs));
}

void X86JNIMacroAssembler::SignExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movsxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movzxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::MoveArguments(ArrayRef<ArgumentLocation> dests,
                                         ArrayRef<ArgumentLocation> srcs,
                                         ArrayRef<FrameOffset> refs) {
  size_t arg_count = dests.size();
  DCHECK_EQ(arg_count, srcs.size());
  DCHECK_EQ(arg_count, refs.size());

  // Store register args to stack slots. Convert processed references to `jobject`.
  bool found_hidden_arg = false;
  for (size_t i = 0; i != arg_count; ++i) {
    const ArgumentLocation& src = srcs[i];
    const ArgumentLocation& dest = dests[i];
    const FrameOffset ref = refs[i];
    DCHECK_EQ(src.GetSize(), dest.GetSize());  // Even for references.
    if (src.IsRegister()) {
      if (UNLIKELY(dest.IsRegister())) {
        if (dest.GetRegister().Equals(src.GetRegister())) {
          // JNI compiler sometimes adds a no-op move.
          continue;
        }
        // The native ABI has only stack arguments, but we may pass one "hidden arg" in a register.
        CHECK(!found_hidden_arg);
        found_hidden_arg = true;
        DCHECK_EQ(ref, kInvalidReferenceOffset);
        DCHECK(
            !dest.GetRegister().Equals(X86ManagedRegister::FromCpuRegister(GetScratchRegister())));
        Move(dest.GetRegister(), src.GetRegister(), dest.GetSize());
      } else {
        if (ref != kInvalidReferenceOffset) {
          // Note: We can clobber `src` here as the register cannot hold more than one argument.
          // This overload of `CreateJObject()` currently does not use the scratch
          // register ECX, so this shall not clobber another argument.
          CreateJObject(src.GetRegister(), ref, src.GetRegister(), /*null_allowed=*/ i != 0u);
        }
        Store(dest.GetFrameOffset(), src.GetRegister(), dest.GetSize());
      }
    } else {
      // Delay copying until we have spilled all registers, including the scratch register ECX.
    }
  }

  // Copy incoming stack args. Convert processed references to `jobject`.
  for (size_t i = 0; i != arg_count; ++i) {
    const ArgumentLocation& src = srcs[i];
    const ArgumentLocation& dest = dests[i];
    const FrameOffset ref = refs[i];
    DCHECK_EQ(src.GetSize(), dest.GetSize());  // Even for references.
    if (!src.IsRegister()) {
      DCHECK(!dest.IsRegister());
      if (ref != kInvalidReferenceOffset) {
        DCHECK_EQ(srcs[i].GetFrameOffset(), refs[i]);
        CreateJObject(dest.GetFrameOffset(), ref, /*null_allowed=*/ i != 0u);
      } else {
        Copy(dest.GetFrameOffset(), src.GetFrameOffset(), dest.GetSize());
      }
    }
  }
}
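
// Note: the two passes above are deliberate. Register arguments (and the single "hidden arg"
// register-to-register move) are handled first, while ECX may still hold an incoming argument;
// stack-to-stack copies are done afterwards, once every register argument has been spilled and
// ECX is free to serve as the scratch register.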

void X86JNIMacroAssembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  DCHECK(!mdest.Equals(X86ManagedRegister::FromCpuRegister(GetScratchRegister())));
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (!dest.Equals(src)) {
    if (dest.IsCpuRegister() && src.IsCpuRegister()) {
      __ movl(dest.AsCpuRegister(), src.AsCpuRegister());
    } else if (src.IsX87Register() && dest.IsXmmRegister()) {
      // Pass via stack and pop X87 register.
      IncreaseFrameSize(16);
      if (size == 4) {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstps(Address(ESP, 0));
        __ movss(dest.AsXmmRegister(), Address(ESP, 0));
      } else {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstpl(Address(ESP, 0));
        __ movsd(dest.AsXmmRegister(), Address(ESP, 0));
      }
      DecreaseFrameSize(16);
    } else {
      // TODO: x87, SSE
      UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
    }
  }
}
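
// Note: the x87-to-XMM case above goes through memory because x86 has no instruction that moves
// an x87 stack register directly into an SSE register; the value is stored with fstps/fstpl and
// reloaded with movss/movsd from a temporarily reserved, 16-byte aligned stack slot.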

void X86JNIMacroAssembler::Move(ManagedRegister mdest, size_t value) {
  X86ManagedRegister dest = mdest.AsX86();
  __ movl(dest.AsCpuRegister(), Immediate(value));
}

void X86JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size) {
  DCHECK(size == 4 || size == 8) << size;
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, src));
  __ movl(Address(ESP, dest), scratch);
  if (size == 8) {
    __ movl(scratch, Address(ESP, FrameOffset(src.Int32Value() + 4)));
    __ movl(Address(ESP, FrameOffset(dest.Int32Value() + 4)), scratch);
  }
}

void X86JNIMacroAssembler::CreateJObject(ManagedRegister mout_reg,
                                         FrameOffset spilled_reference_offset,
                                         ManagedRegister min_reg,
                                         bool null_allowed) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    if (!out_reg.Equals(in_reg)) {
      __ xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    __ testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    __ j(kZero, &null_arg);
    __ leal(out_reg.AsCpuRegister(), Address(ESP, spilled_reference_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(out_reg.AsCpuRegister(), Address(ESP, spilled_reference_offset));
  }
}
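
// Note: the `jobject` produced above is simply the address of the stack slot holding the spilled
// reference (computed with `leal`), or null when `null_allowed` is set and the reference itself
// is null; the in-register reference value is only used for the null test.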

void X86JNIMacroAssembler::CreateJObject(FrameOffset out_off,
                                         FrameOffset spilled_reference_offset,
                                         bool null_allowed) {
  Register scratch = GetScratchRegister();
  if (null_allowed) {
    Label null_arg;
    __ movl(scratch, Address(ESP, spilled_reference_offset));
    __ testl(scratch, scratch);
    __ j(kZero, &null_arg);
    __ leal(scratch, Address(ESP, spilled_reference_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(scratch, Address(ESP, spilled_reference_offset));
  }
  __ movl(Address(ESP, out_off), scratch);
}

void X86JNIMacroAssembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}

void X86JNIMacroAssembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}

void X86JNIMacroAssembler::Jump(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ jmp(Address(base.AsCpuRegister(), offset.Int32Value()));
}

void X86JNIMacroAssembler::Call(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call
}

void X86JNIMacroAssembler::CallFromThread(ThreadOffset32 offset) {
  __ fs()->call(Address::Absolute(offset));
}

void X86JNIMacroAssembler::GetCurrentThread(ManagedRegister dest) {
  __ fs()->movl(dest.AsX86().AsCpuRegister(),
                Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
}

void X86JNIMacroAssembler::GetCurrentThread(FrameOffset offset) {
  Register scratch = GetScratchRegister();
  __ fs()->movl(scratch, Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
  __ movl(Address(ESP, offset), scratch);
}

void X86JNIMacroAssembler::TryToTransitionFromRunnableToNative(
    JNIMacroLabel* label, ArrayRef<const ManagedRegister> scratch_regs) {
  constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative);
  constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable);
  constexpr ThreadOffset32 thread_flags_offset = Thread::ThreadFlagsOffset<kX86PointerSize>();
  constexpr ThreadOffset32 thread_held_mutex_mutator_lock_offset =
      Thread::HeldMutexOffset<kX86PointerSize>(kMutatorLock);

  // We need to preserve the managed argument EAX.
  DCHECK_GE(scratch_regs.size(), 2u);
  Register saved_eax = scratch_regs[0].AsX86().AsCpuRegister();
  Register scratch = scratch_regs[1].AsX86().AsCpuRegister();

  // CAS release, old_value = kRunnableStateValue, new_value = kNativeStateValue, no flags.
  __ movl(saved_eax, EAX);  // Save EAX.
  static_assert(kRunnableStateValue == 0u);
  __ xorl(EAX, EAX);
  __ movl(scratch, Immediate(kNativeStateValue));
  __ fs()->LockCmpxchgl(Address::Absolute(thread_flags_offset.Uint32Value()), scratch);
  // LOCK CMPXCHG has full barrier semantics, so we don't need barriers here.
  __ movl(EAX, saved_eax);  // Restore EAX; MOV does not change flags.
  // If any flags are set, go to the slow path.
  __ j(kNotZero, X86JNIMacroLabel::Cast(label)->AsX86());

  // Clear `self->tlsPtr_.held_mutexes[kMutatorLock]`.
  __ fs()->movl(Address::Absolute(thread_held_mutex_mutator_lock_offset.Uint32Value()),
                Immediate(0));
}
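
// Note: `lock cmpxchg` sets ZF only when the value read from the thread flags word equalled the
// expected old value (plain kRunnableStateValue with no flag bits set), so the `kNotZero` branch
// above covers both a non-runnable state and any pending suspend/checkpoint flags.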

void X86JNIMacroAssembler::TryToTransitionFromNativeToRunnable(
    JNIMacroLabel* label,
    ArrayRef<const ManagedRegister> scratch_regs,
    ManagedRegister return_reg) {
  constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative);
  constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable);
  constexpr ThreadOffset32 thread_flags_offset = Thread::ThreadFlagsOffset<kX86PointerSize>();
  constexpr ThreadOffset32 thread_held_mutex_mutator_lock_offset =
      Thread::HeldMutexOffset<kX86PointerSize>(kMutatorLock);
  constexpr ThreadOffset32 thread_mutator_lock_offset =
      Thread::MutatorLockOffset<kX86PointerSize>();

  size_t scratch_index = 0u;
  auto get_scratch_reg = [&]() {
    while (true) {
      DCHECK_LT(scratch_index, scratch_regs.size());
      X86ManagedRegister scratch_reg = scratch_regs[scratch_index].AsX86();
      ++scratch_index;
      DCHECK(!scratch_reg.Overlaps(return_reg.AsX86()));
      if (scratch_reg.AsCpuRegister() != EAX) {
        return scratch_reg.AsCpuRegister();
      }
    }
  };
  Register scratch = get_scratch_reg();
  bool preserve_eax = return_reg.AsX86().Overlaps(X86ManagedRegister::FromCpuRegister(EAX));
  Register saved_eax = preserve_eax ? get_scratch_reg() : kNoRegister;

  // CAS acquire, old_value = kNativeStateValue, new_value = kRunnableStateValue, no flags.
  if (preserve_eax) {
    __ movl(saved_eax, EAX);  // Save EAX.
  }
  __ movl(EAX, Immediate(kNativeStateValue));
  static_assert(kRunnableStateValue == 0u);
  __ xorl(scratch, scratch);
  __ fs()->LockCmpxchgl(Address::Absolute(thread_flags_offset.Uint32Value()), scratch);
  // LOCK CMPXCHG has full barrier semantics, so we don't need barriers here.
  if (preserve_eax) {
    __ movl(EAX, saved_eax);  // Restore EAX; MOV does not change flags.
  }
  // If any flags are set, or the state is not Native, go to the slow path.
  // (While the thread can theoretically transition between different Suspended states,
  // it would be very unexpected to see a state other than Native at this point.)
  __ j(kNotZero, X86JNIMacroLabel::Cast(label)->AsX86());

  // Set `self->tlsPtr_.held_mutexes[kMutatorLock]` to the mutator lock.
  __ fs()->movl(scratch, Address::Absolute(thread_mutator_lock_offset.Uint32Value()));
  __ fs()->movl(Address::Absolute(thread_held_mutex_mutator_lock_offset.Uint32Value()),
                scratch);
}

void X86JNIMacroAssembler::SuspendCheck(JNIMacroLabel* label) {
  __ fs()->testl(Address::Absolute(Thread::ThreadFlagsOffset<kX86PointerSize>()),
                 Immediate(Thread::SuspendOrCheckpointRequestFlags()));
  __ j(kNotZero, X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::ExceptionPoll(JNIMacroLabel* label) {
  __ fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()), Immediate(0));
  __ j(kNotEqual, X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::DeliverPendingException() {
  // Pass the exception as an argument in EAX.
  __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()));
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, pDeliverException)));
  // This call should never return.
  __ int3();
}

std::unique_ptr<JNIMacroLabel> X86JNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new X86JNIMacroLabel());
}

void X86JNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ jmp(X86JNIMacroLabel::Cast(label)->AsX86());
}

static Condition UnaryConditionToX86Condition(JNIMacroUnaryCondition cond) {
  switch (cond) {
    case JNIMacroUnaryCondition::kZero:
      return kZero;
    case JNIMacroUnaryCondition::kNotZero:
      return kNotZero;
    default:
      LOG(FATAL) << "Not implemented condition: " << static_cast<int>(cond);
      UNREACHABLE();
  }
}

void X86JNIMacroAssembler::TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) {
  CHECK(label != nullptr);

  // CMP self->tls32_.is_gc_marking, 0
  // Jcc <Offset>
  DCHECK_EQ(Thread::IsGcMarkingSize(), 4u);
  __ fs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86PointerSize>()), Immediate(0));
  __ j(UnaryConditionToX86Condition(cond), X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::TestMarkBit(ManagedRegister mref,
                                       JNIMacroLabel* label,
                                       JNIMacroUnaryCondition cond) {
  DCHECK(kUseBakerReadBarrier);
  Register ref = mref.AsX86().AsCpuRegister();
  static_assert(LockWord::kMarkBitStateSize == 1u);
  __ testl(Address(ref, mirror::Object::MonitorOffset().SizeValue()),
           Immediate(LockWord::kMarkBitStateMaskShifted));
  __ j(UnaryConditionToX86Condition(cond), X86JNIMacroLabel::Cast(label)->AsX86());
}
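
// Note: TestMarkBit() above inspects the mark-bit state in the object's lock word (the monitor
// field) and is intended for Baker read-barrier fast paths, as the DCHECK(kUseBakerReadBarrier)
// indicates; the caller chooses whether to branch on the bit being zero or non-zero.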

void X86JNIMacroAssembler::TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) {
  __ cmpb(Address::Absolute(address), Immediate(0));
  __ j(kNotZero, X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ Bind(X86JNIMacroLabel::Cast(label)->AsX86());
}

#undef __

}  // namespace x86
}  // namespace art