/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_arm_vixl.h"

#include <iostream>
#include <type_traits>

#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"

using namespace vixl::aarch32;  // NOLINT(build/namespaces)
namespace vixl32 = vixl::aarch32;

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

namespace art {
namespace arm {

#ifdef ___
#error "ARM Assembler macro already defined."
#else
#define ___ asm_.GetVIXLAssembler()->
#endif

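// Helpers mapping ART's ArmManagedRegister onto the corresponding VIXL aarch32
// registers. Core, S and D registers share ArmManagedRegister's flat id space,
// hence the kNumberOfCoreRegIds / kNumberOfSRegIds adjustments below.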
vixl::aarch32::Register AsVIXLRegister(ArmManagedRegister reg) {
  CHECK(reg.IsCoreRegister());
  return vixl::aarch32::Register(reg.RegId());
}

static inline vixl::aarch32::SRegister AsVIXLSRegister(ArmManagedRegister reg) {
  CHECK(reg.IsSRegister());
  return vixl::aarch32::SRegister(reg.RegId() - kNumberOfCoreRegIds);
}

static inline vixl::aarch32::DRegister AsVIXLDRegister(ArmManagedRegister reg) {
  CHECK(reg.IsDRegister());
  return vixl::aarch32::DRegister(reg.RegId() - kNumberOfCoreRegIds - kNumberOfSRegIds);
}

static inline vixl::aarch32::Register AsVIXLRegisterPairLow(ArmManagedRegister reg) {
  return vixl::aarch32::Register(reg.AsRegisterPairLow());
}

static inline vixl::aarch32::Register AsVIXLRegisterPairHigh(ArmManagedRegister reg) {
  return vixl::aarch32::Register(reg.AsRegisterPairHigh());
}

void ArmVIXLJNIMacroAssembler::FinalizeCode() {
  for (const std::unique_ptr<
      ArmVIXLJNIMacroAssembler::ArmException>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  asm_.FinalizeCode();
}

static dwarf::Reg DWARFReg(vixl32::Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg.GetCode()));
}

static dwarf::Reg DWARFReg(vixl32::SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg.GetCode()));
}

static constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);

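// Build the JNI stub frame: push the callee-save core registers together with
// LR, then the FP callee saves, extend the stack to `frame_size`, store the
// ArtMethod* (held in r0) at SP, and finally spill any entry argument
// registers to their slots above the frame.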
void ArmVIXLJNIMacroAssembler::BuildFrame(size_t frame_size,
                                          ManagedRegister method_reg,
                                          ArrayRef<const ManagedRegister> callee_save_regs,
                                          const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK(r0.Is(AsVIXLRegister(method_reg.AsArm())));

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  ___ Push(RegisterList(core_spill_mask));
  cfi().AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi().RelOffsetForMany(DWARFReg(r0), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);

    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(s0), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  // Must at least have space for Method*.
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // handles CFI as well.

  // Write out Method*.
  asm_.StoreToOffset(kStoreWord, r0, sp, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ArmManagedRegister reg = entry_spills.at(i).AsArm();
    if (reg.IsNoRegister()) {
      // only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      asm_.StoreToOffset(kStoreWord, AsVIXLRegister(reg), sp, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      asm_.StoreSToOffset(AsVIXLSRegister(reg), sp, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      asm_.StoreDToOffset(AsVIXLDRegister(reg), sp, offset);
      offset += 8;
    }
  }
}

void ArmVIXLJNIMacroAssembler::RemoveFrame(size_t frame_size,
                                           ArrayRef<const ManagedRegister> callee_save_regs,
                                           bool may_suspend) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi().RememberState();

  // Compute callee saves to pop and LR.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // handles CFI as well.

  // Pop FP callee saves.
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);
    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi().RestoreMany(DWARFReg(s0), fp_spill_mask);
  }

  // Pop core callee saves and LR.
  ___ Pop(RegisterList(core_spill_mask));

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    if (may_suspend) {
      // The method may be suspended; refresh the Marking Register.
      ___ Ldr(mr, MemOperand(tr, Thread::IsGcMarkingOffset<kArmPointerSize>().Int32Value()));
    } else {
      // The method shall not be suspended; no need to refresh the Marking Register.

      // Check that the Marking Register is a callee-save register,
      // and thus has been preserved by native code following the
      // AAPCS calling convention.
      DCHECK_NE(core_spill_mask & (1 << MR), 0)
          << "core_spill_mask should contain Marking Register R" << MR;

      // The following condition is a compile-time one, so it does not have a run-time cost.
      if (kIsDebugBuild) {
        // The following condition is a run-time one; it is executed after the
        // previous compile-time test, to avoid penalizing non-debug builds.
        if (emit_run_time_checks_in_debug_mode_) {
          // Emit a run-time check verifying that the Marking Register is up-to-date.
          UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();
          // Ensure we are not clobbering a callee-save register that was restored before.
          DCHECK_EQ(core_spill_mask & (1 << temp.GetCode()), 0)
              << "core_spill_mask should not contain scratch register R" << temp.GetCode();
          asm_.GenerateMarkingRegisterCheck(temp);
        }
      }
    }
  }

  // Return to LR.
  ___ Bx(vixl32::lr);

  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}


void ArmVIXLJNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, -adjust);
  cfi().AdjustCFAOffset(adjust);
}

void ArmVIXLJNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

void ArmVIXLJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister m_src, size_t size) {
  ArmManagedRegister src = m_src.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(AsVIXLRegister(src));
    asm_.StoreToOffset(kStoreWord, AsVIXLRegister(src), sp, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    asm_.StoreToOffset(kStoreWord, AsVIXLRegisterPairLow(src), sp, dest.Int32Value());
    asm_.StoreToOffset(kStoreWord, AsVIXLRegisterPairHigh(src), sp, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    CHECK_EQ(4u, size);
    asm_.StoreSToOffset(AsVIXLSRegister(src), sp, dest.Int32Value());
  } else {
    CHECK_EQ(8u, size);
    CHECK(src.IsDRegister()) << src;
    asm_.StoreDToOffset(AsVIXLDRegister(src), sp, dest.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src);
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src);
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
                                             ManagedRegister msrc,
                                             FrameOffset in_off,
                                             ManagedRegister mscratch) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, sp, in_off.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);
}

void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest,
                                       FrameOffset src,
                                       ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister mdest,
                                       ManagedRegister mbase,
                                       MemberOffset offs,
                                       bool unpoison_reference) {
  vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
  vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dest, base);
  asm_.LoadFromOffset(kLoadWord, dest, base, offs.Int32Value());

  if (unpoison_reference) {
    asm_.MaybeUnpoisonHeapReference(dest);
  }
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       FrameOffset src ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtr(ManagedRegister dest ATTRIBUTE_UNUSED,
                                          ManagedRegister base ATTRIBUTE_UNUSED,
                                          Offset offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest,
                                                     uint32_t imm,
                                                     ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadImmediate(scratch, imm);
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm(), sp, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
                                              ThreadOffset32 src,
                                              size_t size) {
  return Load(m_dst.AsArm(), tr, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
  vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dest);
  asm_.LoadFromOffset(kLoadWord, dest, tr, offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs,
                                                    ThreadOffset32 thr_offs,
                                                    ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, tr, thr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, fr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs ATTRIBUTE_UNUSED,
                                                  FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                                  ManagedRegister mscratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
                                                        FrameOffset fr_offs,
                                                        ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.AddConstant(scratch, sp, fr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  asm_.StoreToOffset(kStoreWord, sp, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::Move(ManagedRegister mdst,
                                    ManagedRegister msrc,
                                    size_t size ATTRIBUTE_UNUSED) {
  ArmManagedRegister dst = mdst.AsArm();
  ArmManagedRegister src = msrc.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
      temps.Exclude(AsVIXLRegister(dst));
      ___ Mov(AsVIXLRegister(dst), AsVIXLRegister(src));
    } else if (dst.IsDRegister()) {
      if (src.IsDRegister()) {
        ___ Vmov(F64, AsVIXLDRegister(dst), AsVIXLDRegister(src));
      } else {
        // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi})
        CHECK(src.IsRegisterPair()) << src;
        ___ Vmov(AsVIXLDRegister(dst), AsVIXLRegisterPairLow(src), AsVIXLRegisterPairHigh(src));
      }
    } else if (dst.IsSRegister()) {
      if (src.IsSRegister()) {
        ___ Vmov(F32, AsVIXLSRegister(dst), AsVIXLSRegister(src));
      } else {
        // VMOV Sn, Rn (Sn = Rn)
        CHECK(src.IsCoreRegister()) << src;
        ___ Vmov(AsVIXLSRegister(dst), AsVIXLRegister(src));
      }
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        ___ Mov(AsVIXLRegisterPairLow(dst), AsVIXLRegisterPairLow(src));
        ___ Mov(AsVIXLRegisterPairHigh(dst), AsVIXLRegisterPairHigh(src));
      } else {
        ___ Mov(AsVIXLRegisterPairHigh(dst), AsVIXLRegisterPairHigh(src));
        ___ Mov(AsVIXLRegisterPairLow(dst), AsVIXLRegisterPairLow(src));
      }
    }
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest,
                                    FrameOffset src,
                                    ManagedRegister mscratch,
                                    size_t size) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  CHECK(size == 4 || size == 8) << size;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  if (size == 4) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
  } else if (size == 8) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value() + 4);
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
                                    ManagedRegister src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest_base ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    FrameOffset src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister scratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

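// Handle scope entries: a null reference is represented as 0, any other
// reference as the address of its handle scope slot (SP + handle_scope_offset).
// The conditional sequences below use explicit IT blocks, emitted inside an
// ExactAssemblyScope because the low-level `it`/`mov` mnemonics bypass the
// VIXL macro assembler.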
void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister min_reg,
                                                      bool null_allowed) {
  vixl::aarch32::Register out_reg = AsVIXLRegister(mout_reg.AsArm());
  vixl::aarch32::Register in_reg =
      min_reg.AsArm().IsNoRegister() ? vixl::aarch32::Register() : AsVIXLRegister(min_reg.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(out_reg);
  if (null_allowed) {
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (!in_reg.IsValid()) {
      asm_.LoadFromOffset(kLoadWord, out_reg, sp, handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }

    temps.Exclude(in_reg);
    ___ Cmp(in_reg, 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      if (!out_reg.Is(in_reg)) {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 3 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(eq, 0xc);
        ___ mov(eq, out_reg, 0);
        asm_.AddConstantInIt(out_reg, sp, handle_scope_offset.Int32Value(), ne);
      } else {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 2 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(ne, 0x8);
        asm_.AddConstantInIt(out_reg, sp, handle_scope_offset.Int32Value(), ne);
      }
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(out_reg, sp, handle_scope_offset.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister mscratch,
                                                      bool null_allowed) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  if (null_allowed) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(scratch, 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                               2 * vixl32::kMaxInstructionSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
      ___ it(ne, 0x8);
      asm_.AddConstantInIt(scratch, sp, handle_scope_offset.Int32Value(), ne);
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(scratch, sp, handle_scope_offset.Int32Value());
  }
  asm_.StoreToOffset(kStoreWord, scratch, sp, out_off.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadReferenceFromHandleScope(
    ManagedRegister mout_reg ATTRIBUTE_UNUSED,
    ManagedRegister min_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::Call(ManagedRegister mbase,
                                    Offset offset,
                                    ManagedRegister mscratch) {
  vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, base, offset.Int32Value());
  ___ Blx(scratch);
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  // Call *(*(SP + base) + offset)
  asm_.LoadFromOffset(kLoadWord, scratch, sp, base.Int32Value());
  asm_.LoadFromOffset(kLoadWord, scratch, scratch, offset.Int32Value());
  ___ Blx(scratch);
  // TODO: place reference map on call
}

void ArmVIXLJNIMacroAssembler::CallFromThread(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                              ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(ManagedRegister mtr) {
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(AsVIXLRegister(mtr.AsArm()));
  ___ Mov(AsVIXLRegister(mtr.AsArm()), tr);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(FrameOffset dest_offset,
                                                ManagedRegister scratch ATTRIBUTE_UNUSED) {
  asm_.StoreToOffset(kStoreWord, tr, sp, dest_offset.Int32Value());
}

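// Poll for a pending exception: load Thread::exception_, compare it against
// null, and branch to an out-of-line delivery block on mismatch. The blocks
// are recorded in exception_blocks_ and emitted later by FinalizeCode().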
void ArmVIXLJNIMacroAssembler::ExceptionPoll(ManagedRegister mscratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  exception_blocks_.emplace_back(
      new ArmVIXLJNIMacroAssembler::ArmException(mscratch.AsArm(), stack_adjust));
  asm_.LoadFromOffset(kLoadWord,
                      scratch,
                      tr,
                      Thread::ExceptionOffset<kArmPointerSize>().Int32Value());

  ___ Cmp(scratch, 0);
  vixl32::Label* label = exception_blocks_.back()->Entry();
  ___ BPreferNear(ne, label);
  // TODO: think about using CBNZ here.
}

std::unique_ptr<JNIMacroLabel> ArmVIXLJNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new ArmVIXLJNIMacroLabel());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ B(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label,
                                    JNIMacroUnaryCondition condition,
                                    ManagedRegister mtest) {
  CHECK(label != nullptr);

  vixl::aarch32::Register test = AsVIXLRegister(mtest.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(test);
  switch (condition) {
    case JNIMacroUnaryCondition::kZero:
      ___ CompareAndBranchIfZero(test, ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    case JNIMacroUnaryCondition::kNotZero:
      ___ CompareAndBranchIfNonZero(test, ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    default:
      LOG(FATAL) << "Not implemented unary condition: " << static_cast<int>(condition);
      UNREACHABLE();
  }
}

void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::EmitExceptionPoll(
    ArmVIXLJNIMacroAssembler::ArmException* exception) {
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }

  vixl::aarch32::Register scratch = AsVIXLRegister(exception->scratch_);
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  // Pass exception object as argument.
  // Don't care about preserving r0 as this won't return.
  ___ Mov(r0, scratch);
  temps.Include(scratch);
  // TODO: check that exception->scratch_ is dead by this point.
  vixl32::Register temp = temps.Acquire();
  ___ Ldr(temp,
          MemOperand(tr,
                     QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value()));
  ___ Blx(temp);
}

void ArmVIXLJNIMacroAssembler::MemoryBarrier(ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

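// Common load helper behind Load() and LoadFromThread(): dispatches on the
// destination register kind and on the access size (1, 4 or 8 bytes).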
void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
                                    vixl32::Register base,
                                    int32_t offset,
                                    size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsCoreRegister()) {
    vixl::aarch32::Register dst = AsVIXLRegister(dest);
    CHECK(!dst.Is(sp)) << dest;

    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(dst);

    if (size == 1u) {
      ___ Ldrb(dst, MemOperand(base, offset));
    } else {
      CHECK_EQ(4u, size) << dest;
      ___ Ldr(dst, MemOperand(base, offset));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dest;
    ___ Ldr(AsVIXLRegisterPairLow(dest), MemOperand(base, offset));
    ___ Ldr(AsVIXLRegisterPairHigh(dest), MemOperand(base, offset + 4));
  } else if (dest.IsSRegister()) {
    ___ Vldr(AsVIXLSRegister(dest), MemOperand(base, offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Vldr(AsVIXLDRegister(dest), MemOperand(base, offset));
  }
}

}  // namespace arm
}  // namespace art