/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_x86.h"

#include "base/casts.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"
#include "utils/assembler.h"

namespace art {
namespace x86 {

static Register GetScratchRegister() {
  // ECX is an argument register on entry and gets spilled in BuildFrame().
  // After that, we can use it as a scratch register.
  return ECX;
}

// Slowpath entered when Thread::Current()->_exception is non-null.
class X86ExceptionSlowPath final : public SlowPath {
 public:
  explicit X86ExceptionSlowPath(size_t stack_adjust) : stack_adjust_(stack_adjust) {}
  void Emit(Assembler* sp_asm) override;

 private:
  const size_t stack_adjust_;
};

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86Core(static_cast<int>(reg));
}

constexpr size_t kFramePointerSize = 4;

static constexpr size_t kNativeStackAlignment = 16;
static_assert(kNativeStackAlignment == kStackAlignment);

#define __ asm_.

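// BuildFrame() emits the stub prologue. An illustrative sketch of the frame it
// builds, inferred from the pushes and the ESP adjustment below (stack grows
// downward):
//
//   [ return address ]   pushed by the caller; CFA offset 4
//   [ callee saves   ]   spill_regs, pushed in reverse order
//   [ padding / outs ]   the explicit 'adjust' of ESP
//   [ ArtMethod*     ]   at ESP, only when method_reg is a register
//
// In the @CriticalNative tail call case (frame_size == kFramePointerSize),
// all of these collapse away and no instructions are emitted.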
void X86JNIMacroAssembler::BuildFrame(size_t frame_size,
                                      ManagedRegister method_reg,
                                      ArrayRef<const ManagedRegister> spill_regs) {
  DCHECK_EQ(CodeSize(), 0U);  // Nothing emitted yet.
  cfi().SetCurrentCFAOffset(4);  // Return address on stack.
  if (frame_size == kFramePointerSize) {
    // For @CriticalNative tail call.
    CHECK(method_reg.IsNoRegister());
    CHECK(spill_regs.empty());
  } else if (method_reg.IsNoRegister()) {
    CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  } else {
    CHECK_ALIGNED(frame_size, kStackAlignment);
  }
  int gpr_count = 0;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ pushl(spill);
    gpr_count++;
    cfi().AdjustCFAOffset(kFramePointerSize);
    cfi().RelOffset(DWARFReg(spill), 0);
  }

  // Return address then method on stack.
  int32_t adjust = frame_size - gpr_count * kFramePointerSize -
                   kFramePointerSize /*return address*/ -
                   (method_reg.IsRegister() ? kFramePointerSize /*method*/ : 0u);
  if (adjust != 0) {
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
  if (method_reg.IsRegister()) {
    __ pushl(method_reg.AsX86().AsCpuRegister());
    cfi().AdjustCFAOffset(kFramePointerSize);
  }
  DCHECK_EQ(static_cast<size_t>(cfi().GetCurrentCFAOffset()), frame_size);
}

void X86JNIMacroAssembler::RemoveFrame(size_t frame_size,
                                       ArrayRef<const ManagedRegister> spill_regs,
                                       bool may_suspend ATTRIBUTE_UNUSED) {
  CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  cfi().RememberState();
  // -kFramePointerSize for ArtMethod*.
  int adjust = frame_size - spill_regs.size() * kFramePointerSize - kFramePointerSize;
  if (adjust != 0) {
    __ addl(ESP, Immediate(adjust));
    cfi().AdjustCFAOffset(-adjust);
  }
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ popl(spill);
    cfi().AdjustCFAOffset(-static_cast<int>(kFramePointerSize));
    cfi().Restore(DWARFReg(spill));
  }
  __ ret();
  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}

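// Note: the frame adjustments below use addl with a negated immediate rather
// than subl. A presumed rationale: x86 sign-extends 8-bit immediates, so
// "addl ESP, -128" still fits the short imm8 encoding, whereas "subl ESP, 128"
// (+128 is outside the imm8 range) would need a 32-bit immediate.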
void X86JNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
}

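// A free function rather than a member so that it can be shared with
// X86ExceptionSlowPath::Emit() below, which only has a raw X86Assembler
// and no X86JNIMacroAssembler to call DecreaseFrameSize() on.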
static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
    assembler->addl(ESP, Immediate(adjust));
    assembler->cfi().AdjustCFAOffset(-adjust);
  }
}

void X86JNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  DecreaseFrameSizeImpl(&asm_, adjust);
}

void X86JNIMacroAssembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  X86ManagedRegister src = msrc.AsX86();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(Address(ESP, offs), src.AsCpuRegister());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(Address(ESP, offs), src.AsRegisterPairLow());
    __ movl(Address(ESP, FrameOffset(offs.Int32Value() + 4)), src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    if (size == 4) {
      __ fstps(Address(ESP, offs));
    } else {
      __ fstpl(Address(ESP, offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      __ movss(Address(ESP, offs), src.AsXmmRegister());
    } else {
      __ movsd(Address(ESP, offs), src.AsXmmRegister());
    }
  }
}

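// Note: StoreRef() and StoreRawPtr() below emit the identical movl. On x86,
// heap references and native pointers are both 32 bits, so the distinction
// only matters on architectures where the two differ in size; the separate
// entry points mirror the architecture-neutral JNIMacroAssembler interface.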
void X86JNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  __ movl(Address(ESP, dest), src.AsCpuRegister());
}

void X86JNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  __ movl(Address(ESP, dest), src.AsCpuRegister());
}

void X86JNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm) {
  __ movl(Address(ESP, dest), Immediate(imm));
}

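// The fs() qualifier below emits an FS segment override. On x86, ART anchors
// the current Thread at the FS segment base (see GetCurrentThread(), which
// reads Thread::SelfOffset the same way), so fs:[thr_offs] addresses a field
// of Thread::Current() without needing a register to hold the thread pointer.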
void X86JNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs, FrameOffset fr_offs) {
  Register scratch = GetScratchRegister();
  __ leal(scratch, Address(ESP, fr_offs));
  __ fs()->movl(Address::Absolute(thr_offs), scratch);
}

void X86JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  __ fs()->movl(Address::Absolute(thr_offs), ESP);
}

void X86JNIMacroAssembler::StoreSpanning(FrameOffset /*dst*/,
                                         ManagedRegister /*src*/,
                                         FrameOffset /*in_off*/) {
  UNIMPLEMENTED(FATAL);  // This case only currently exists for ARM.
}

void X86JNIMacroAssembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(dest.AsCpuRegister(), Address(ESP, src));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(dest.AsRegisterPairLow(), Address(ESP, src));
    __ movl(dest.AsRegisterPairHigh(), Address(ESP, FrameOffset(src.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      __ flds(Address(ESP, src));
    } else {
      __ fldl(Address(ESP, src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      __ movss(dest.AsXmmRegister(), Address(ESP, src));
    } else {
      __ movsd(dest.AsXmmRegister(), Address(ESP, src));
    }
  }
}

void X86JNIMacroAssembler::LoadFromThread(ManagedRegister mdest, ThreadOffset32 src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    if (size == 1u) {
      __ fs()->movzxb(dest.AsCpuRegister(), Address::Absolute(src));
    } else {
      CHECK_EQ(4u, size);
      __ fs()->movl(dest.AsCpuRegister(), Address::Absolute(src));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ fs()->movl(dest.AsRegisterPairLow(), Address::Absolute(src));
    __ fs()->movl(dest.AsRegisterPairHigh(), Address::Absolute(ThreadOffset32(src.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      __ fs()->flds(Address::Absolute(src));
    } else {
      __ fs()->fldl(Address::Absolute(src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      __ fs()->movss(dest.AsXmmRegister(), Address::Absolute(src));
    } else {
      __ fs()->movsd(dest.AsXmmRegister(), Address::Absolute(src));
    }
  }
}

void X86JNIMacroAssembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ movl(dest.AsCpuRegister(), Address(ESP, src));
}

void X86JNIMacroAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
                                   bool unpoison_reference) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister() && base.AsX86().IsCpuRegister());
  __ movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
  if (unpoison_reference) {
    __ MaybeUnpoisonHeapReference(dest.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::LoadRawPtr(ManagedRegister mdest,
                                      ManagedRegister base,
                                      Offset offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister() && base.AsX86().IsCpuRegister());
  __ movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
}

void X86JNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs));
}

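// Only EAX, ECX, EDX and EBX expose byte-sized subregisters (AL..BL) in 32-bit
// mode, which is why the size-1 cases below go through AsByteRegister() rather
// than using the plain CPU register.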
void X86JNIMacroAssembler::SignExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movsxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movzxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

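// The 32-bit cdecl native ABI passes every argument on the stack, so apart
// from the single register-allocated "hidden arg" handled below, MoveArguments
// only ever needs register-to-stack stores and stack-to-stack copies.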
void X86JNIMacroAssembler::MoveArguments(ArrayRef<ArgumentLocation> dests,
                                         ArrayRef<ArgumentLocation> srcs) {
  DCHECK_EQ(dests.size(), srcs.size());
  bool found_hidden_arg = false;
  for (size_t i = 0, arg_count = srcs.size(); i != arg_count; ++i) {
    const ArgumentLocation& src = srcs[i];
    const ArgumentLocation& dest = dests[i];
    DCHECK_EQ(src.GetSize(), dest.GetSize());
    if (UNLIKELY(dest.IsRegister())) {
      // Native ABI has only stack arguments but we may pass one "hidden arg" in register.
      CHECK(!found_hidden_arg);
      found_hidden_arg = true;
      CHECK(src.IsRegister());
      Move(dest.GetRegister(), src.GetRegister(), dest.GetSize());
    } else {
      if (src.IsRegister()) {
        Store(dest.GetFrameOffset(), src.GetRegister(), dest.GetSize());
      } else {
        Copy(dest.GetFrameOffset(), src.GetFrameOffset(), dest.GetSize());
      }
    }
  }
}

void X86JNIMacroAssembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  DCHECK(!mdest.Equals(X86ManagedRegister::FromCpuRegister(GetScratchRegister())));
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (!dest.Equals(src)) {
    if (dest.IsCpuRegister() && src.IsCpuRegister()) {
      __ movl(dest.AsCpuRegister(), src.AsCpuRegister());
    } else if (src.IsX87Register() && dest.IsXmmRegister()) {
      // Pass via stack and pop X87 register.
      IncreaseFrameSize(16);
      if (size == 4) {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstps(Address(ESP, 0));
        __ movss(dest.AsXmmRegister(), Address(ESP, 0));
      } else {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstpl(Address(ESP, 0));
        __ movsd(dest.AsXmmRegister(), Address(ESP, 0));
      }
      DecreaseFrameSize(16);
    } else {
      // TODO: x87, SSE
      UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
    }
  }
}

void X86JNIMacroAssembler::CopyRef(FrameOffset dest, FrameOffset src) {
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, src));
  __ movl(Address(ESP, dest), scratch);
}

void X86JNIMacroAssembler::CopyRef(FrameOffset dest,
                                   ManagedRegister base,
                                   MemberOffset offs,
                                   bool unpoison_reference) {
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(base.AsX86().AsCpuRegister(), offs));
  if (unpoison_reference) {
    __ MaybeUnpoisonHeapReference(scratch);
  }
  __ movl(Address(ESP, dest), scratch);
}

void X86JNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset32 thr_offs) {
  Register scratch = GetScratchRegister();
  __ fs()->movl(scratch, Address::Absolute(thr_offs));
  __ movl(Address(ESP, fr_offs), scratch);
}

void X86JNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs,
                                              FrameOffset fr_offs,
                                              ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  Load(scratch, fr_offs, 4);
  __ fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
}

void X86JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size) {
  DCHECK(size == 4 || size == 8) << size;
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, src));
  __ movl(Address(ESP, dest), scratch);
  if (size == 8) {
    __ movl(scratch, Address(ESP, FrameOffset(src.Int32Value() + 4)));
    __ movl(Address(ESP, FrameOffset(dest.Int32Value() + 4)), scratch);
  }
}

void X86JNIMacroAssembler::Copy(FrameOffset /*dst*/,
                                ManagedRegister /*src_base*/,
                                Offset /*src_offset*/,
                                ManagedRegister /*scratch*/,
                                size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

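// Several Copy() overloads below rely on x86 pushl/popl accepting memory
// operands: a push/pop pair moves a 32-bit value memory-to-memory without
// clobbering any register, which is why those variants require that no
// scratch register be supplied (CHECK(scratch.IsNoRegister())).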
void X86JNIMacroAssembler::Copy(ManagedRegister dest_base,
                                Offset dest_offset,
                                FrameOffset src,
                                ManagedRegister scratch,
                                size_t size) {
  CHECK(scratch.IsNoRegister());
  CHECK_EQ(size, 4u);
  __ pushl(Address(ESP, src));
  __ popl(Address(dest_base.AsX86().AsCpuRegister(), dest_offset));
}

void X86JNIMacroAssembler::Copy(FrameOffset dest,
                                FrameOffset src_base,
                                Offset src_offset,
                                ManagedRegister mscratch,
                                size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  __ movl(scratch, Address(ESP, src_base));
  __ movl(scratch, Address(scratch, src_offset));
  __ movl(Address(ESP, dest), scratch);
}

void X86JNIMacroAssembler::Copy(ManagedRegister dest,
                                Offset dest_offset,
                                ManagedRegister src,
                                Offset src_offset,
                                ManagedRegister scratch,
                                size_t size) {
  CHECK_EQ(size, 4u);
  CHECK(scratch.IsNoRegister());
  __ pushl(Address(src.AsX86().AsCpuRegister(), src_offset));
  __ popl(Address(dest.AsX86().AsCpuRegister(), dest_offset));
}

void X86JNIMacroAssembler::Copy(FrameOffset dest,
                                Offset dest_offset,
                                FrameOffset src,
                                Offset src_offset,
                                ManagedRegister mscratch,
                                size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  CHECK_EQ(dest.Int32Value(), src.Int32Value());
  __ movl(scratch, Address(ESP, src));
  __ pushl(Address(scratch, src_offset));
  __ popl(Address(scratch, dest_offset));
}

void X86JNIMacroAssembler::MemoryBarrier(ManagedRegister) {
  __ mfence();
}

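// CreateHandleScopeEntry() converts a reference into what native code receives
// as a jobject: the address of a handle scope slot holding the reference, or
// null. With null_allowed, a null reference must stay null rather than become
// a pointer to a slot containing null, hence the test/jump around the leal.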
void X86JNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                  FrameOffset handle_scope_offset,
                                                  ManagedRegister min_reg,
                                                  bool null_allowed) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    if (!out_reg.Equals(in_reg)) {
      __ xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    __ testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    __ j(kZero, &null_arg);
    __ leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset));
  }
}

void X86JNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                  FrameOffset handle_scope_offset,
                                                  bool null_allowed) {
  Register scratch = GetScratchRegister();
  if (null_allowed) {
    Label null_arg;
    __ movl(scratch, Address(ESP, handle_scope_offset));
    __ testl(scratch, scratch);
    __ j(kZero, &null_arg);
    __ leal(scratch, Address(ESP, handle_scope_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(scratch, Address(ESP, handle_scope_offset));
  }
  __ movl(Address(ESP, out_off), scratch);
}

// Given a handle scope entry, load the associated reference.
void X86JNIMacroAssembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
                                                        ManagedRegister min_reg) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(out_reg.IsCpuRegister());
  CHECK(in_reg.IsCpuRegister());
  Label null_arg;
  if (!out_reg.Equals(in_reg)) {
    __ xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
  }
  __ testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
  __ j(kZero, &null_arg);
  __ movl(out_reg.AsCpuRegister(), Address(in_reg.AsCpuRegister(), 0));
  __ Bind(&null_arg);
}

void X86JNIMacroAssembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void X86JNIMacroAssembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void X86JNIMacroAssembler::Jump(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ jmp(Address(base.AsCpuRegister(), offset.Int32Value()));
}

void X86JNIMacroAssembler::Call(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call.
}

void X86JNIMacroAssembler::Call(FrameOffset base, Offset offset) {
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, base));
  __ call(Address(scratch, offset));
}

void X86JNIMacroAssembler::CallFromThread(ThreadOffset32 offset) {
  __ fs()->call(Address::Absolute(offset));
}

void X86JNIMacroAssembler::GetCurrentThread(ManagedRegister dest) {
  __ fs()->movl(dest.AsX86().AsCpuRegister(),
                Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
}

void X86JNIMacroAssembler::GetCurrentThread(FrameOffset offset) {
  Register scratch = GetScratchRegister();
  __ fs()->movl(scratch, Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
  __ movl(Address(ESP, offset), scratch);
}

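// ExceptionPoll() emits only the inline check of Thread::Current()'s exception
// field; the throw path is enqueued as a slow path, which the assembler emits
// after the main code when the buffer is finalized, so the common no-exception
// case falls straight through.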
void X86JNIMacroAssembler::ExceptionPoll(size_t stack_adjust) {
  X86ExceptionSlowPath* slow = new (__ GetAllocator()) X86ExceptionSlowPath(stack_adjust);
  __ GetBuffer()->EnqueueSlowPath(slow);
  __ fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()), Immediate(0));
  __ j(kNotEqual, slow->Entry());
}

std::unique_ptr<JNIMacroLabel> X86JNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new X86JNIMacroLabel());
}

void X86JNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ jmp(X86JNIMacroLabel::Cast(label)->AsX86());
}

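// TestGcMarking() branches on Thread::Current()->tls32_.is_gc_marking; the
// emitted code is just a cmp plus a conditional jump. Presumably this lets
// JNI stubs take a read-barrier-aware path only while a concurrent collection
// is in its marking phase.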
void X86JNIMacroAssembler::TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) {
  CHECK(label != nullptr);

  art::x86::Condition x86_cond;
  switch (cond) {
    case JNIMacroUnaryCondition::kZero:
      x86_cond = art::x86::kZero;
      break;
    case JNIMacroUnaryCondition::kNotZero:
      x86_cond = art::x86::kNotZero;
      break;
    default:
      LOG(FATAL) << "Not implemented condition: " << static_cast<int>(cond);
      UNREACHABLE();
  }

  // CMP self->tls32_.is_gc_marking, 0
  // Jcc <Offset>
  DCHECK_EQ(Thread::IsGcMarkingSize(), 4u);
  __ fs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86PointerSize>()), Immediate(0));
  __ j(x86_cond, X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ Bind(X86JNIMacroLabel::Cast(label)->AsX86());
}

#undef __

void X86ExceptionSlowPath::Emit(Assembler* sasm) {
  X86Assembler* sp_asm = down_cast<X86Assembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  // Note: the return value is dead.
  if (stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSizeImpl(sp_asm, stack_adjust_);
  }
  // Pass exception as argument in EAX.
  __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()));
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, pDeliverException)));
  // This call should never return.
  __ int3();
#undef __
}

}  // namespace x86
}  // namespace art