/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_arm_vixl.h"

#include <iostream>
#include <type_traits>

#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"

using namespace vixl::aarch32;  // NOLINT(build/namespaces)
namespace vixl32 = vixl::aarch32;

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

namespace art {
namespace arm {

#ifdef ___
#error "ARM Assembler macro already defined."
#else
#define ___ asm_.GetVIXLAssembler()->
#endif
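
// The `___` shorthand forwards to the underlying VIXL assembler; for example,
// `___ Mov(r0, r1)` expands to `asm_.GetVIXLAssembler()->Mov(r0, r1)`.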

void ArmVIXLJNIMacroAssembler::FinalizeCode() {
  for (const std::unique_ptr<
      ArmVIXLJNIMacroAssembler::ArmException>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  asm_.FinalizeCode();
}

static dwarf::Reg DWARFReg(vixl32::Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg.GetCode()));
}

static dwarf::Reg DWARFReg(vixl32::SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg.GetCode()));
}

static constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);

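// Prologue: pushes LR together with the callee-save core registers, VPUSHes any
// callee-save VFP registers, grows the stack to `frame_size`, stores the
// ArtMethod* (r0) at SP and writes the entry spills above the frame. CFI is
// updated at every step so the frame stays unwindable throughout.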
void ArmVIXLJNIMacroAssembler::BuildFrame(size_t frame_size,
                                          ManagedRegister method_reg,
                                          ArrayRef<const ManagedRegister> callee_save_regs,
                                          const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK(r0.Is(method_reg.AsArm().AsVIXLRegister()));

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  ___ Push(RegisterList(core_spill_mask));
  cfi().AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi().RelOffsetForMany(DWARFReg(r0), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);

    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(s0), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  // Must at least have space for Method*.
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // handles CFI as well.

  // Write out Method*.
  asm_.StoreToOffset(kStoreWord, r0, sp, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ArmManagedRegister reg = entry_spills.at(i).AsArm();
    if (reg.IsNoRegister()) {
      // Only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      asm_.StoreToOffset(kStoreWord, reg.AsVIXLRegister(), sp, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      asm_.StoreSToOffset(reg.AsVIXLSRegister(), sp, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      asm_.StoreDToOffset(reg.AsVIXLDRegister(), sp, offset);
      offset += 8;
    }
  }
}

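// Epilogue: shrinks the frame back to the callee-save area, VPOPs the VFP callee
// saves, pops the core callee saves together with LR, refreshes the marking
// register when Baker read barriers are in use, and returns with BX LR.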
void ArmVIXLJNIMacroAssembler::RemoveFrame(size_t frame_size,
                                           ArrayRef<const ManagedRegister> callee_save_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi().RememberState();

  // Compute callee saves to pop and LR.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // handles CFI as well.

  // Pop FP callee saves.
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);
    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi().RestoreMany(DWARFReg(s0), fp_spill_mask);
  }

  // Pop core callee saves and LR.
  ___ Pop(RegisterList(core_spill_mask));

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Refresh Mark Register.
    // TODO: Refresh MR only if suspend is taken.
    ___ Ldr(mr, MemOperand(tr, Thread::IsGcMarkingOffset<kArmPointerSize>().Int32Value()));
  }

  // Return to LR.
  ___ Bx(vixl32::lr);

  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}


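// Frame size adjustments also update the DWARF CFA offset so that unwind
// information stays in sync with SP.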
void ArmVIXLJNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, -adjust);
  cfi().AdjustCFAOffset(adjust);
}

void ArmVIXLJNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

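// Stores `m_src` into the stack slot at `dest`, dispatching on the register kind:
// core and S registers store one word, register pairs store two words, and
// D registers store a doubleword.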
void ArmVIXLJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister m_src, size_t size) {
  ArmManagedRegister src = m_src.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(src.AsVIXLRegister());
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairLow(), sp, dest.Int32Value());
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairHigh(), sp, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    CHECK_EQ(4u, size);
    asm_.StoreSToOffset(src.AsVIXLSRegister(), sp, dest.Int32Value());
  } else {
    CHECK_EQ(8u, size);
    CHECK(src.IsDRegister()) << src;
    asm_.StoreDToOffset(src.AsVIXLDRegister(), sp, dest.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src.AsVIXLRegister());
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src.AsVIXLRegister());
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
                                             ManagedRegister msrc,
                                             FrameOffset in_off,
                                             ManagedRegister mscratch) {
  ArmManagedRegister src = msrc.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, in_off.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, dest.Int32Value() + 4);
}

void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest,
                                       FrameOffset src,
                                       ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, src.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest,
                                       ManagedRegister base,
                                       MemberOffset offs,
                                       bool unpoison_reference) {
  ArmManagedRegister dst = dest.AsArm();
  CHECK(dst.IsCoreRegister() && base.AsArm().IsCoreRegister()) << dst;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dst.AsVIXLRegister(), base.AsArm().AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord,
                      dst.AsVIXLRegister(),
                      base.AsArm().AsVIXLRegister(),
                      offs.Int32Value());

  if (unpoison_reference) {
    asm_.MaybeUnpoisonHeapReference(dst.AsVIXLRegister());
  }
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       FrameOffset src ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtr(ManagedRegister dest ATTRIBUTE_UNUSED,
                                          ManagedRegister base ATTRIBUTE_UNUSED,
                                          Offset offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest,
                                                     uint32_t imm,
                                                     ManagedRegister scratch) {
  ArmManagedRegister mscratch = scratch.AsArm();
  CHECK(mscratch.IsCoreRegister()) << mscratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(mscratch.AsVIXLRegister());
  asm_.LoadImmediate(mscratch.AsVIXLRegister(), imm);
  asm_.StoreToOffset(kStoreWord, mscratch.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm(), sp, src.Int32Value(), size);
}

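// Thread-relative accessors mirror the SP-relative ones above but use `tr`
// (the thread register) as the base.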
void ArmVIXLJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
                                              ThreadOffset32 src,
                                              size_t size) {
  return Load(m_dst.AsArm(), tr, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister m_dst, ThreadOffset32 offs) {
  ArmManagedRegister dst = m_dst.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dst.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, dst.AsVIXLRegister(), tr, offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs,
                                                    ThreadOffset32 thr_offs,
                                                    ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), tr, thr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, fr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs ATTRIBUTE_UNUSED,
                                                  FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                                  ManagedRegister mscratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
                                                        FrameOffset fr_offs,
                                                        ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.AddConstant(scratch.AsVIXLRegister(), sp, fr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  asm_.StoreToOffset(kStoreWord, sp, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

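// Register-to-register move covering every supported pairing: core to core,
// D register from a D register or a core register pair, S register from an
// S or core register, and pair to pair (ordering the two MOVs so the first
// cannot clobber the input of the second).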
void ArmVIXLJNIMacroAssembler::Move(ManagedRegister m_dst,
                                    ManagedRegister m_src,
                                    size_t size ATTRIBUTE_UNUSED) {
  ArmManagedRegister dst = m_dst.AsArm();
  ArmManagedRegister src = m_src.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
      temps.Exclude(dst.AsVIXLRegister());
      ___ Mov(dst.AsVIXLRegister(), src.AsVIXLRegister());
    } else if (dst.IsDRegister()) {
      if (src.IsDRegister()) {
        ___ Vmov(F64, dst.AsVIXLDRegister(), src.AsVIXLDRegister());
      } else {
        // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi})
        CHECK(src.IsRegisterPair()) << src;
        ___ Vmov(dst.AsVIXLDRegister(), src.AsVIXLRegisterPairLow(), src.AsVIXLRegisterPairHigh());
      }
    } else if (dst.IsSRegister()) {
      if (src.IsSRegister()) {
        ___ Vmov(F32, dst.AsVIXLSRegister(), src.AsVIXLSRegister());
      } else {
        // VMOV Sn, Rn (Sn = Rn)
        CHECK(src.IsCoreRegister()) << src;
        ___ Vmov(dst.AsVIXLSRegister(), src.AsVIXLRegister());
      }
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        ___ Mov(dst.AsVIXLRegisterPairLow(), src.AsVIXLRegisterPairLow());
        ___ Mov(dst.AsVIXLRegisterPairHigh(), src.AsVIXLRegisterPairHigh());
      } else {
        ___ Mov(dst.AsVIXLRegisterPairHigh(), src.AsVIXLRegisterPairHigh());
        ___ Mov(dst.AsVIXLRegisterPairLow(), src.AsVIXLRegisterPairLow());
      }
    }
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest,
                                    FrameOffset src,
                                    ManagedRegister scratch,
                                    size_t size) {
  ArmManagedRegister temp = scratch.AsArm();
  CHECK(temp.IsCoreRegister()) << temp;
  CHECK(size == 4 || size == 8) << size;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(temp.AsVIXLRegister());
  if (size == 4) {
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value());
  } else if (size == 8) {
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value());
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value() + 4);
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value() + 4);
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
                                    ManagedRegister src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest_base ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    FrameOffset src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister scratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

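// Materializes a handle scope entry address in `out_reg`. With `null_allowed`,
// the reference is tested first and the result is either 0 (null reference) or
// SP + handle_scope_offset, selected inside an IT block.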
void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister min_reg,
                                                      bool null_allowed) {
  ArmManagedRegister out_reg = mout_reg.AsArm();
  ArmManagedRegister in_reg = min_reg.AsArm();
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister()) << in_reg;
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(out_reg.AsVIXLRegister());
  if (null_allowed) {
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP + handle_scope_offset)
    if (in_reg.IsNoRegister()) {
      asm_.LoadFromOffset(kLoadWord,
                          out_reg.AsVIXLRegister(),
                          sp,
                          handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }

    temps.Exclude(in_reg.AsVIXLRegister());
    ___ Cmp(in_reg.AsVIXLRegister(), 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value(), kCcDontCare)) {
      if (!out_reg.Equals(in_reg)) {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 3 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(eq, 0xc);
        ___ mov(eq, out_reg.AsVIXLRegister(), 0);
        asm_.AddConstantInIt(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
      } else {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 2 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(ne, 0x8);
        asm_.AddConstantInIt(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
      }
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister mscratch,
                                                      bool null_allowed) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  if (null_allowed) {
    asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP + handle_scope_offset)
    ___ Cmp(scratch.AsVIXLRegister(), 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value(), kCcDontCare)) {
      ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                               2 * vixl32::kMaxInstructionSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
      ___ it(ne, 0x8);
      asm_.AddConstantInIt(scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
  }
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, out_off.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadReferenceFromHandleScope(
    ManagedRegister mout_reg ATTRIBUTE_UNUSED,
    ManagedRegister min_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

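// Indirect calls: load the target from [base + offset] (or from the stack slot
// at `base` for the FrameOffset variant) into the scratch register and BLX to it.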
void ArmVIXLJNIMacroAssembler::Call(ManagedRegister mbase,
                                    Offset offset,
                                    ManagedRegister mscratch) {
  ArmManagedRegister base = mbase.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      base.AsVIXLRegister(),
                      offset.Int32Value());
  ___ Blx(scratch.AsVIXLRegister());
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  // Call *(*(SP + base) + offset)
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, base.Int32Value());
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      scratch.AsVIXLRegister(),
                      offset.Int32Value());
  ___ Blx(scratch.AsVIXLRegister());
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::CallFromThread(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                              ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(ManagedRegister mtr) {
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(mtr.AsArm().AsVIXLRegister());
  ___ Mov(mtr.AsArm().AsVIXLRegister(), tr);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(FrameOffset dest_offset,
                                                ManagedRegister scratch ATTRIBUTE_UNUSED) {
  asm_.StoreToOffset(kStoreWord, tr, sp, dest_offset.Int32Value());
}

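// Polls the thread's pending exception field and branches to an out-of-line slow
// path if it is non-null. The slow path is queued in exception_blocks_ and
// emitted later by FinalizeCode() via EmitExceptionPoll().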
void ArmVIXLJNIMacroAssembler::ExceptionPoll(ManagedRegister m_scratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  ArmManagedRegister scratch = m_scratch.AsArm();
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  exception_blocks_.emplace_back(
      new ArmVIXLJNIMacroAssembler::ArmException(scratch, stack_adjust));
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      tr,
                      Thread::ExceptionOffset<kArmPointerSize>().Int32Value());

  ___ Cmp(scratch.AsVIXLRegister(), 0);
  {
    ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                             vixl32::kMaxInstructionSizeInBytes,
                             CodeBufferCheckScope::kMaximumSize);
    vixl32::Label* label = exception_blocks_.back()->Entry();
    ___ b(ne, Narrow, label);
    ___ AddBranchLabel(label);
  }
  // TODO: think about using CBNZ here.
}

std::unique_ptr<JNIMacroLabel> ArmVIXLJNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new ArmVIXLJNIMacroLabel());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ B(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label,
                                    JNIMacroUnaryCondition condition,
                                    ManagedRegister test) {
  CHECK(label != nullptr);

  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(test.AsArm().AsVIXLRegister());
  switch (condition) {
    case JNIMacroUnaryCondition::kZero:
      ___ CompareAndBranchIfZero(test.AsArm().AsVIXLRegister(),
                                 ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    case JNIMacroUnaryCondition::kNotZero:
      ___ CompareAndBranchIfNonZero(test.AsArm().AsVIXLRegister(),
                                    ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    default:
      LOG(FATAL) << "Not implemented unary condition: " << static_cast<int>(condition);
      UNREACHABLE();
  }
}

void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

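// Slow path for ExceptionPoll(): undoes any extra stack adjustment, moves the
// pending exception into r0 and calls the pDeliverException entrypoint, which
// does not return.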
void ArmVIXLJNIMacroAssembler::EmitExceptionPoll(
    ArmVIXLJNIMacroAssembler::ArmException* exception) {
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }

  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(exception->scratch_.AsVIXLRegister());
  // Pass exception object as argument.
  // Don't care about preserving r0 as this won't return.
  ___ Mov(r0, exception->scratch_.AsVIXLRegister());
  temps.Include(exception->scratch_.AsVIXLRegister());
  // TODO: check that exception->scratch_ is dead by this point.
  vixl32::Register temp = temps.Acquire();
  ___ Ldr(temp,
          MemOperand(tr,
                     QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value()));
  ___ Blx(temp);
}

void ArmVIXLJNIMacroAssembler::MemoryBarrier(ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

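// Shared helper behind Load() and LoadFromThread(): loads `size` bytes from
// [base + offset] into `dest`, choosing LDRB/LDR, an LDR pair, or VLDR
// depending on the destination register kind.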
void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
                                    vixl32::Register base,
                                    int32_t offset,
                                    size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsCoreRegister()) {
    CHECK(!dest.AsVIXLRegister().Is(sp)) << dest;

    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(dest.AsVIXLRegister());

    if (size == 1u) {
      ___ Ldrb(dest.AsVIXLRegister(), MemOperand(base, offset));
    } else {
      CHECK_EQ(4u, size) << dest;
      ___ Ldr(dest.AsVIXLRegister(), MemOperand(base, offset));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dest;
    ___ Ldr(dest.AsVIXLRegisterPairLow(), MemOperand(base, offset));
    ___ Ldr(dest.AsVIXLRegisterPairHigh(), MemOperand(base, offset + 4));
  } else if (dest.IsSRegister()) {
    ___ Vldr(dest.AsVIXLSRegister(), MemOperand(base, offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Vldr(dest.AsVIXLDRegister(), MemOperand(base, offset));
  }
}

}  // namespace arm
}  // namespace art