/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_arm_vixl.h"

#include <iostream>
#include <type_traits>

#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"

using namespace vixl::aarch32;  // NOLINT(build/namespaces)
namespace vixl32 = vixl::aarch32;

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

namespace art {
namespace arm {

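// All assembly below is emitted through this shorthand for the wrapped VIXL aarch32
// macro assembler.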
#ifdef ___
#error "ARM Assembler macro already defined."
#else
#define ___ asm_.GetVIXLAssembler()->
#endif

vixl::aarch32::Register AsVIXLRegister(ArmManagedRegister reg) {
  CHECK(reg.IsCoreRegister());
  return vixl::aarch32::Register(reg.RegId());
}

static inline vixl::aarch32::SRegister AsVIXLSRegister(ArmManagedRegister reg) {
  CHECK(reg.IsSRegister());
  return vixl::aarch32::SRegister(reg.RegId() - kNumberOfCoreRegIds);
}

static inline vixl::aarch32::DRegister AsVIXLDRegister(ArmManagedRegister reg) {
  CHECK(reg.IsDRegister());
  return vixl::aarch32::DRegister(reg.RegId() - kNumberOfCoreRegIds - kNumberOfSRegIds);
}

static inline vixl::aarch32::Register AsVIXLRegisterPairLow(ArmManagedRegister reg) {
  return vixl::aarch32::Register(reg.AsRegisterPairLow());
}

static inline vixl::aarch32::Register AsVIXLRegisterPairHigh(ArmManagedRegister reg) {
  return vixl::aarch32::Register(reg.AsRegisterPairHigh());
}

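// Emit the out-of-line exception poll slow paths recorded by ExceptionPoll(), then finalize
// the underlying assembler.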
void ArmVIXLJNIMacroAssembler::FinalizeCode() {
  for (const std::unique_ptr<
      ArmVIXLJNIMacroAssembler::ArmException>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  asm_.FinalizeCode();
}

static dwarf::Reg DWARFReg(vixl32::Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg.GetCode()));
}

static dwarf::Reg DWARFReg(vixl32::SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg.GetCode()));
}

static constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);

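// Build the JNI stub frame: push core callee saves and LR, push FP callee saves, reserve the
// remaining frame space, store the method pointer from r0 at SP, and spill incoming arguments
// to their frame slots.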
void ArmVIXLJNIMacroAssembler::BuildFrame(size_t frame_size,
                                          ManagedRegister method_reg,
                                          ArrayRef<const ManagedRegister> callee_save_regs,
                                          const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK(r0.Is(AsVIXLRegister(method_reg.AsArm())));

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  ___ Push(RegisterList(core_spill_mask));
  cfi().AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi().RelOffsetForMany(DWARFReg(r0), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);

    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(s0), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  // Must at least have space for Method*.
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // handles CFI as well.

  // Write out Method*.
  asm_.StoreToOffset(kStoreWord, r0, sp, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ArmManagedRegister reg = entry_spills.at(i).AsArm();
    if (reg.IsNoRegister()) {
      // only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      asm_.StoreToOffset(kStoreWord, AsVIXLRegister(reg), sp, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      asm_.StoreSToOffset(AsVIXLSRegister(reg), sp, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      asm_.StoreDToOffset(AsVIXLDRegister(reg), sp, offset);
      offset += 8;
    }
  }
}

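// Tear down the JNI stub frame: release the local frame area, pop FP and core callee saves
// (including LR), refresh the Marking Register if needed for Baker read barriers, and return
// via LR.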
void ArmVIXLJNIMacroAssembler::RemoveFrame(size_t frame_size,
                                           ArrayRef<const ManagedRegister> callee_save_regs,
                                           bool may_suspend) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi().RememberState();

  // Compute callee saves to pop and LR.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // handles CFI as well.

  // Pop FP callee saves.
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);
    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi().RestoreMany(DWARFReg(s0), fp_spill_mask);
  }

  // Pop core callee saves and LR.
  ___ Pop(RegisterList(core_spill_mask));

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    if (may_suspend) {
      // The method may be suspended; refresh the Marking Register.
      ___ Ldr(mr, MemOperand(tr, Thread::IsGcMarkingOffset<kArmPointerSize>().Int32Value()));
    } else {
      // The method shall not be suspended; no need to refresh the Marking Register.

      // Check that the Marking Register is a callee-save register,
      // and thus has been preserved by native code following the
      // AAPCS calling convention.
      DCHECK_NE(core_spill_mask & (1 << MR), 0)
          << "core_spill_mask should contain Marking Register R" << MR;

      // The following condition is a compile-time one, so it does not have a run-time cost.
      if (kIsDebugBuild) {
        // The following condition is a run-time one; it is executed after the
        // previous compile-time test, to avoid penalizing non-debug builds.
        if (emit_run_time_checks_in_debug_mode_) {
          // Emit a run-time check verifying that the Marking Register is up-to-date.
          UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();
          // Ensure we are not clobbering a callee-save register that was restored before.
          DCHECK_EQ(core_spill_mask & (1 << temp.GetCode()), 0)
              << "core_spill_mask should not contain scratch register R" << temp.GetCode();
          asm_.GenerateMarkingRegisterCheck(temp);
        }
      }
    }
  }

  // Return to LR.
  ___ Bx(vixl32::lr);

  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}


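// The frame size adjustments below also keep the CFI CFA offset in sync.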
void ArmVIXLJNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, -adjust);
  cfi().AdjustCFAOffset(adjust);
}

void ArmVIXLJNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

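// Store a managed register of the given size into the frame slot at |dest|.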
void ArmVIXLJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister m_src, size_t size) {
  ArmManagedRegister src = m_src.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(AsVIXLRegister(src));
    asm_.StoreToOffset(kStoreWord, AsVIXLRegister(src), sp, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    asm_.StoreToOffset(kStoreWord, AsVIXLRegisterPairLow(src), sp, dest.Int32Value());
    asm_.StoreToOffset(kStoreWord, AsVIXLRegisterPairHigh(src), sp, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    CHECK_EQ(4u, size);
    asm_.StoreSToOffset(AsVIXLSRegister(src), sp, dest.Int32Value());
  } else {
    CHECK_EQ(8u, size);
    CHECK(src.IsDRegister()) << src;
    asm_.StoreDToOffset(AsVIXLDRegister(src), sp, dest.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src);
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src);
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
                                             ManagedRegister msrc,
                                             FrameOffset in_off,
                                             ManagedRegister mscratch) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, sp, in_off.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);
}

void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest,
                                       FrameOffset src,
                                       ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister mdest,
                                       ManagedRegister mbase,
                                       MemberOffset offs,
                                       bool unpoison_reference) {
  vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
  vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dest, base);
  asm_.LoadFromOffset(kLoadWord, dest, base, offs.Int32Value());

  if (unpoison_reference) {
    asm_.MaybeUnpoisonHeapReference(dest);
  }
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       FrameOffset src ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtr(ManagedRegister dest ATTRIBUTE_UNUSED,
                                          ManagedRegister base ATTRIBUTE_UNUSED,
                                          Offset offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest,
                                                     uint32_t imm,
                                                     ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadImmediate(scratch, imm);
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm(), sp, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
                                              ThreadOffset32 src,
                                              size_t size) {
  return Load(m_dst.AsArm(), tr, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
  vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dest);
  asm_.LoadFromOffset(kLoadWord, dest, tr, offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs,
                                                    ThreadOffset32 thr_offs,
                                                    ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, tr, thr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, fr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs ATTRIBUTE_UNUSED,
                                                  FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                                  ManagedRegister mscratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
                                                        FrameOffset fr_offs,
                                                        ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.AddConstant(scratch, sp, fr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  asm_.StoreToOffset(kStoreWord, sp, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::Move(ManagedRegister mdst,
                                    ManagedRegister msrc,
                                    size_t size ATTRIBUTE_UNUSED) {
  ArmManagedRegister dst = mdst.AsArm();
  ArmManagedRegister src = msrc.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
      temps.Exclude(AsVIXLRegister(dst));
      ___ Mov(AsVIXLRegister(dst), AsVIXLRegister(src));
    } else if (dst.IsDRegister()) {
      if (src.IsDRegister()) {
        ___ Vmov(F64, AsVIXLDRegister(dst), AsVIXLDRegister(src));
      } else {
        // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi})
        CHECK(src.IsRegisterPair()) << src;
        ___ Vmov(AsVIXLDRegister(dst), AsVIXLRegisterPairLow(src), AsVIXLRegisterPairHigh(src));
      }
    } else if (dst.IsSRegister()) {
      if (src.IsSRegister()) {
        ___ Vmov(F32, AsVIXLSRegister(dst), AsVIXLSRegister(src));
      } else {
        // VMOV Sn, Rn (Sn = Rn)
        CHECK(src.IsCoreRegister()) << src;
        ___ Vmov(AsVIXLSRegister(dst), AsVIXLRegister(src));
      }
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        ___ Mov(AsVIXLRegisterPairLow(dst), AsVIXLRegisterPairLow(src));
        ___ Mov(AsVIXLRegisterPairHigh(dst), AsVIXLRegisterPairHigh(src));
      } else {
        ___ Mov(AsVIXLRegisterPairHigh(dst), AsVIXLRegisterPairHigh(src));
        ___ Mov(AsVIXLRegisterPairLow(dst), AsVIXLRegisterPairLow(src));
      }
    }
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest,
                                    FrameOffset src,
                                    ManagedRegister mscratch,
                                    size_t size) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  CHECK(size == 4 || size == 8) << size;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  if (size == 4) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
  } else if (size == 8) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value() + 4);
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
                                    ManagedRegister src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest_base ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    FrameOffset src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister scratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

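// Set up a handle scope entry for a reference: a null reference gets an entry value of 0,
// otherwise the entry is the address of the handle scope slot (SP + handle_scope_offset).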
void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister min_reg,
                                                      bool null_allowed) {
  vixl::aarch32::Register out_reg = AsVIXLRegister(mout_reg.AsArm());
  vixl::aarch32::Register in_reg =
      min_reg.AsArm().IsNoRegister() ? vixl::aarch32::Register() : AsVIXLRegister(min_reg.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(out_reg);
  if (null_allowed) {
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (!in_reg.IsValid()) {
      asm_.LoadFromOffset(kLoadWord, out_reg, sp, handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }

    temps.Exclude(in_reg);
    ___ Cmp(in_reg, 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      if (!out_reg.Is(in_reg)) {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 3 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(eq, 0xc);
        ___ mov(eq, out_reg, 0);
        asm_.AddConstantInIt(out_reg, sp, handle_scope_offset.Int32Value(), ne);
      } else {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 2 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(ne, 0x8);
        asm_.AddConstantInIt(out_reg, sp, handle_scope_offset.Int32Value(), ne);
      }
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(out_reg, sp, handle_scope_offset.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister mscratch,
                                                      bool null_allowed) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  if (null_allowed) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(scratch, 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                               2 * vixl32::kMaxInstructionSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
      ___ it(ne, 0x8);
      asm_.AddConstantInIt(scratch, sp, handle_scope_offset.Int32Value(), ne);
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(scratch, sp, handle_scope_offset.Int32Value());
  }
  asm_.StoreToOffset(kStoreWord, scratch, sp, out_off.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadReferenceFromHandleScope(
    ManagedRegister mout_reg ATTRIBUTE_UNUSED,
    ManagedRegister min_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::Call(ManagedRegister mbase,
                                    Offset offset,
                                    ManagedRegister mscratch) {
  vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, base, offset.Int32Value());
  ___ Blx(scratch);
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  // Call *(*(SP + base) + offset)
  asm_.LoadFromOffset(kLoadWord, scratch, sp, base.Int32Value());
  asm_.LoadFromOffset(kLoadWord, scratch, scratch, offset.Int32Value());
  ___ Blx(scratch);
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::CallFromThread(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                              ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(ManagedRegister mtr) {
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(AsVIXLRegister(mtr.AsArm()));
  ___ Mov(AsVIXLRegister(mtr.AsArm()), tr);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(FrameOffset dest_offset,
                                                ManagedRegister scratch ATTRIBUTE_UNUSED) {
  asm_.StoreToOffset(kStoreWord, tr, sp, dest_offset.Int32Value());
}

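// Poll the thread-local exception field and branch to an out-of-line slow path (emitted in
// FinalizeCode()) when an exception is pending.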
void ArmVIXLJNIMacroAssembler::ExceptionPoll(ManagedRegister mscratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  exception_blocks_.emplace_back(
      new ArmVIXLJNIMacroAssembler::ArmException(mscratch.AsArm(), stack_adjust));
  asm_.LoadFromOffset(kLoadWord,
                      scratch,
                      tr,
                      Thread::ExceptionOffset<kArmPointerSize>().Int32Value());

  ___ Cmp(scratch, 0);
  vixl32::Label* label = exception_blocks_.back()->Entry();
  ___ BPreferNear(ne, label);
  // TODO: think about using CBNZ here.
}

std::unique_ptr<JNIMacroLabel> ArmVIXLJNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new ArmVIXLJNIMacroLabel());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ B(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label,
                                    JNIMacroUnaryCondition condition,
                                    ManagedRegister mtest) {
  CHECK(label != nullptr);

  vixl::aarch32::Register test = AsVIXLRegister(mtest.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(test);
  switch (condition) {
    case JNIMacroUnaryCondition::kZero:
      ___ CompareAndBranchIfZero(test, ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    case JNIMacroUnaryCondition::kNotZero:
      ___ CompareAndBranchIfNonZero(test, ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    default:
      LOG(FATAL) << "Not implemented unary condition: " << static_cast<int>(condition);
      UNREACHABLE();
  }
}

void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::EmitExceptionPoll(
    ArmVIXLJNIMacroAssembler::ArmException* exception) {
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }

  vixl::aarch32::Register scratch = AsVIXLRegister(exception->scratch_);
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  // Pass exception object as argument.
  // Don't care about preserving r0 as this won't return.
  ___ Mov(r0, scratch);
  temps.Include(scratch);
  // TODO: check that exception->scratch_ is dead by this point.
  vixl32::Register temp = temps.Acquire();
  ___ Ldr(temp,
          MemOperand(tr,
                     QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value()));
  ___ Blx(temp);
}

void ArmVIXLJNIMacroAssembler::MemoryBarrier(ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
                                    vixl32::Register base,
                                    int32_t offset,
                                    size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsCoreRegister()) {
    vixl::aarch32::Register dst = AsVIXLRegister(dest);
    CHECK(!dst.Is(sp)) << dest;

    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(dst);

    if (size == 1u) {
      ___ Ldrb(dst, MemOperand(base, offset));
    } else {
      CHECK_EQ(4u, size) << dest;
      ___ Ldr(dst, MemOperand(base, offset));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dest;
    ___ Ldr(AsVIXLRegisterPairLow(dest), MemOperand(base, offset));
    ___ Ldr(AsVIXLRegisterPairHigh(dest), MemOperand(base, offset + 4));
  } else if (dest.IsSRegister()) {
    ___ Vldr(AsVIXLSRegister(dest), MemOperand(base, offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Vldr(AsVIXLDRegister(dest), MemOperand(base, offset));
  }
}

}  // namespace arm
}  // namespace art