/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm64.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

namespace art {
namespace arm64 {

#ifdef ___
#error "ARM64 Assembler macro already defined."
#else
#define ___   vixl_masm_->
#endif

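// Emit the deferred exception-poll slow paths recorded by ExceptionPoll(), then let VIXL
// finalize the underlying code buffer.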
void Arm64Assembler::FinalizeCode() {
  for (const std::unique_ptr<Arm64Exception>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  ___ FinalizeCode();
}

size_t Arm64Assembler::CodeSize() const {
  return vixl_masm_->GetBufferCapacity() - vixl_masm_->GetRemainingBufferSpace();
}

const uint8_t* Arm64Assembler::CodeBufferBaseAddress() const {
  return vixl_masm_->GetStartAddress<uint8_t*>();
}

void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) {
  // Copy the instructions from the buffer.
  MemoryRegion from(vixl_masm_->GetStartAddress<void*>(), CodeSize());
  region.CopyFrom(0, from);
}

void Arm64Assembler::GetCurrentThread(ManagedRegister tr) {
  ___ Mov(reg_x(tr.AsArm64().AsXRegister()), reg_x(TR));
}

void Arm64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister /* scratch */) {
  StoreToOffset(TR, SP, offset.Int32Value());
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, -adjust);
  cfi().AdjustCFAOffset(adjust);
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

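// Adds an immediate to a register. For a real condition code, the sum is computed into a
// scratch register and committed with Csel, so rd is only modified when the condition holds.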
void Arm64Assembler::AddConstant(XRegister rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}

void Arm64Assembler::AddConstant(XRegister rd, XRegister rn, int32_t value,
                                 Condition cond) {
  if ((cond == al) || (cond == nv)) {
    // VIXL macro-assembler handles all variants.
    ___ Add(reg_x(rd), reg_x(rn), value);
  } else {
    // temp = rn + value
    // rd = cond ? temp : rd
    UseScratchRegisterScope temps(vixl_masm_);
    temps.Exclude(reg_x(rd), reg_x(rn));
    Register temp = temps.AcquireX();
    ___ Add(temp, reg_x(rn), value);
    ___ Csel(reg_x(rd), temp, reg_x(rd), cond);
  }
}

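// Store routines.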
void Arm64Assembler::StoreWToOffset(StoreOperandType type, WRegister source,
                                    XRegister base, int32_t offset) {
  switch (type) {
    case kStoreByte:
      ___ Strb(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreHalfword:
      ___ Strh(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreWord:
      ___ Str(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

void Arm64Assembler::StoreToOffset(XRegister source, XRegister base, int32_t offset) {
  CHECK_NE(source, SP);
  ___ Str(reg_x(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreSToOffset(SRegister source, XRegister base, int32_t offset) {
  ___ Str(reg_s(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreDToOffset(DRegister source, XRegister base, int32_t offset) {
  ___ Str(reg_d(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::Store(FrameOffset offs, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister src = m_src.AsArm64();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsWRegister()) {
    CHECK_EQ(4u, size);
    StoreWToOffset(kStoreWord, src.AsWRegister(), SP, offs.Int32Value());
  } else if (src.IsXRegister()) {
    CHECK_EQ(8u, size);
    StoreToOffset(src.AsXRegister(), SP, offs.Int32Value());
  } else if (src.IsSRegister()) {
    StoreSToOffset(src.AsSRegister(), SP, offs.Int32Value());
  } else {
    CHECK(src.IsDRegister()) << src;
    StoreDToOffset(src.AsDRegister(), SP, offs.Int32Value());
  }
}

void Arm64Assembler::StoreRef(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsXRegister()) << src;
  StoreWToOffset(kStoreWord, src.AsOverlappingWRegister(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreRawPtr(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsXRegister()) << src;
  StoreToOffset(src.AsXRegister(), SP, offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToFrame(FrameOffset offs, uint32_t imm,
                                           ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadImmediate(scratch.AsXRegister(), imm);
  StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToThread64(ThreadOffset<8> offs, uint32_t imm,
                                              ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadImmediate(scratch.AsXRegister(), imm);
  StoreToOffset(scratch.AsXRegister(), TR, offs.Int32Value());
}

void Arm64Assembler::StoreStackOffsetToThread64(ThreadOffset<8> tr_offs,
                                                FrameOffset fr_offs,
                                                ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  AddConstant(scratch.AsXRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
}

void Arm64Assembler::StoreStackPointerToThread64(ThreadOffset<8> tr_offs) {
  UseScratchRegisterScope temps(vixl_masm_);
  Register temp = temps.AcquireX();
  ___ Mov(temp, reg_x(SP));
  ___ Str(temp, MEM_OP(reg_x(TR), tr_offs.Int32Value()));
}

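// Stores `source` at dest_off and copies the stack word at in_off into the adjacent slot at
// dest_off + 8, so the two values end up in one contiguous 16-byte region.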
void Arm64Assembler::StoreSpanning(FrameOffset dest_off, ManagedRegister m_source,
                                   FrameOffset in_off, ManagedRegister m_scratch) {
  Arm64ManagedRegister source = m_source.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  StoreToOffset(source.AsXRegister(), SP, dest_off.Int32Value());
  LoadFromOffset(scratch.AsXRegister(), SP, in_off.Int32Value());
  StoreToOffset(scratch.AsXRegister(), SP, dest_off.Int32Value() + 8);
}

// Load routines.
void Arm64Assembler::LoadImmediate(XRegister dest, int32_t value,
                                   Condition cond) {
  if ((cond == al) || (cond == nv)) {
    ___ Mov(reg_x(dest), value);
  } else {
    // temp = value
    // rd = cond ? temp : rd
    if (value != 0) {
      UseScratchRegisterScope temps(vixl_masm_);
      temps.Exclude(reg_x(dest));
      Register temp = temps.AcquireX();
      ___ Mov(temp, value);
      ___ Csel(reg_x(dest), temp, reg_x(dest), cond);
    } else {
      ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), cond);
    }
  }
}

void Arm64Assembler::LoadWFromOffset(LoadOperandType type, WRegister dest,
                                     XRegister base, int32_t offset) {
  switch (type) {
    case kLoadSignedByte:
      ___ Ldrsb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadSignedHalfword:
      ___ Ldrsh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedByte:
      ___ Ldrb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedHalfword:
      ___ Ldrh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadWord:
      ___ Ldr(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

// Note: We can extend this member by adding load type info - see
// sign extended A64 load variants.
void Arm64Assembler::LoadFromOffset(XRegister dest, XRegister base,
                                    int32_t offset) {
  CHECK_NE(dest, SP);
  ___ Ldr(reg_x(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadSFromOffset(SRegister dest, XRegister base,
                                     int32_t offset) {
  ___ Ldr(reg_s(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadDFromOffset(DRegister dest, XRegister base,
                                     int32_t offset) {
  ___ Ldr(reg_d(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::Load(Arm64ManagedRegister dest, XRegister base,
                          int32_t offset, size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsWRegister()) {
    CHECK_EQ(4u, size) << dest;
    ___ Ldr(reg_w(dest.AsWRegister()), MEM_OP(reg_x(base), offset));
  } else if (dest.IsXRegister()) {
    CHECK_NE(dest.AsXRegister(), SP) << dest;
    if (size == 4u) {
      ___ Ldr(reg_w(dest.AsOverlappingWRegister()), MEM_OP(reg_x(base), offset));
    } else {
      CHECK_EQ(8u, size) << dest;
      ___ Ldr(reg_x(dest.AsXRegister()), MEM_OP(reg_x(base), offset));
    }
  } else if (dest.IsSRegister()) {
    ___ Ldr(reg_s(dest.AsSRegister()), MEM_OP(reg_x(base), offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Ldr(reg_d(dest.AsDRegister()), MEM_OP(reg_x(base), offset));
  }
}

void Arm64Assembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm64(), SP, src.Int32Value(), size);
}

void Arm64Assembler::LoadFromThread64(ManagedRegister m_dst, ThreadOffset<8> src, size_t size) {
  return Load(m_dst.AsArm64(), TR, src.Int32Value(), size);
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, FrameOffset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsXRegister()) << dst;
  LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), SP, offs.Int32Value());
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, ManagedRegister m_base, MemberOffset offs,
                             bool unpoison_reference) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsXRegister() && base.IsXRegister());
  LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), base.AsXRegister(),
                  offs.Int32Value());
  if (unpoison_reference) {
    WRegister ref_reg = dst.AsOverlappingWRegister();
    MaybeUnpoisonHeapReference(reg_w(ref_reg));
  }
}

void Arm64Assembler::LoadRawPtr(ManagedRegister m_dst, ManagedRegister m_base, Offset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsXRegister() && base.IsXRegister());
  // Remove dst and base from the temp list - higher level API uses IP1, IP0.
  UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(dst.AsXRegister()), reg_x(base.AsXRegister()));
  ___ Ldr(reg_x(dst.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
}

void Arm64Assembler::LoadRawPtrFromThread64(ManagedRegister m_dst, ThreadOffset<8> offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsXRegister()) << dst;
  LoadFromOffset(dst.AsXRegister(), TR, offs.Int32Value());
}

// Copying routines.
void Arm64Assembler::Move(ManagedRegister m_dst, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  if (!dst.Equals(src)) {
    if (dst.IsXRegister()) {
      if (size == 4) {
        CHECK(src.IsWRegister());
        ___ Mov(reg_w(dst.AsOverlappingWRegister()), reg_w(src.AsWRegister()));
      } else {
        if (src.IsXRegister()) {
          ___ Mov(reg_x(dst.AsXRegister()), reg_x(src.AsXRegister()));
        } else {
          ___ Mov(reg_x(dst.AsXRegister()), reg_x(src.AsOverlappingXRegister()));
        }
      }
    } else if (dst.IsWRegister()) {
      CHECK(src.IsWRegister()) << src;
      ___ Mov(reg_w(dst.AsWRegister()), reg_w(src.AsWRegister()));
    } else if (dst.IsSRegister()) {
      CHECK(src.IsSRegister()) << src;
      ___ Fmov(reg_s(dst.AsSRegister()), reg_s(src.AsSRegister()));
    } else {
      CHECK(dst.IsDRegister()) << dst;
      CHECK(src.IsDRegister()) << src;
      ___ Fmov(reg_d(dst.AsDRegister()), reg_d(src.AsDRegister()));
    }
  }
}

void Arm64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
                                            ThreadOffset<8> tr_offs,
                                            ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadFromOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
  StoreToOffset(scratch.AsXRegister(), SP, fr_offs.Int32Value());
}

void Arm64Assembler::CopyRawPtrToThread64(ThreadOffset<8> tr_offs,
                                          FrameOffset fr_offs,
                                          ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadFromOffset(scratch.AsXRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
}

void Arm64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                             ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(),
                  SP, src.Int32Value());
  StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(),
                 SP, dest.Int32Value());
}

void Arm64Assembler::Copy(FrameOffset dest, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = src_base.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), base.AsXRegister(),
                    src_offset.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), base.AsXRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(ManagedRegister m_dest_base, Offset dest_offs, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = m_dest_base.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), base.AsXRegister(),
                   dest_offs.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsXRegister(), base.AsXRegister(), dest_offs.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, FrameOffset /*src_base*/, Offset /*src_offset*/,
                          ManagedRegister /*mscratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

void Arm64Assembler::Copy(ManagedRegister m_dest, Offset dest_offset,
                          ManagedRegister m_src, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  Arm64ManagedRegister dest = m_dest.AsArm64();
  CHECK(dest.IsXRegister()) << dest;
  CHECK(src.IsXRegister()) << src;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    if (scratch.IsWRegister()) {
      LoadWFromOffset(kLoadWord, scratch.AsWRegister(), src.AsXRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsWRegister(), dest.AsXRegister(),
                     dest_offset.Int32Value());
    } else {
      LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), src.AsXRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), dest.AsXRegister(),
                     dest_offset.Int32Value());
    }
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), src.AsXRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsXRegister(), dest.AsXRegister(), dest_offset.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, Offset /*dest_offset*/,
                          FrameOffset /*src*/, Offset /*src_offset*/,
                          ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

void Arm64Assembler::MemoryBarrier(ManagedRegister m_scratch ATTRIBUTE_UNUSED) {
  // TODO: Should we check that m_scratch is IP? - see arm.
  ___ Dmb(InnerShareable, BarrierAll);
}

void Arm64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ Sxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ Sxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ Uxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ Uxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void Arm64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void Arm64Assembler::Call(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister()) << scratch;
  LoadFromOffset(scratch.AsXRegister(), base.AsXRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsXRegister()));
}

void Arm64Assembler::JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister()) << scratch;
  // Remove base and scratch from the temp list - higher level API uses IP1, IP0.
  UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(base.AsXRegister()), reg_x(scratch.AsXRegister()));
  ___ Ldr(reg_x(scratch.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
  ___ Br(reg_x(scratch.AsXRegister()));
}

void Arm64Assembler::Call(FrameOffset base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  // Call *(*(SP + base) + offset)
  LoadFromOffset(scratch.AsXRegister(), SP, base.Int32Value());
  LoadFromOffset(scratch.AsXRegister(), scratch.AsXRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsXRegister()));
}

void Arm64Assembler::CallFromThread64(ThreadOffset<8> /*offset*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Call() variant";
}

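// HandleScope support: the output receives either null or the address of the handle scope slot
// (SP + handle_scope_offs); null is produced only when null_allowed and the slot holds null.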
void Arm64Assembler::CreateHandleScopeEntry(
    ManagedRegister m_out_reg, FrameOffset handle_scope_offs, ManagedRegister m_in_reg,
    bool null_allowed) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  // For now we only hold stale handle scope entries in x registers.
  CHECK(in_reg.IsNoRegister() || in_reg.IsXRegister()) << in_reg;
  CHECK(out_reg.IsXRegister()) << out_reg;
  if (null_allowed) {
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      LoadWFromOffset(kLoadWord, out_reg.AsOverlappingWRegister(), SP,
                      handle_scope_offs.Int32Value());
      in_reg = out_reg;
    }
    ___ Cmp(reg_w(in_reg.AsOverlappingWRegister()), 0);
    if (!out_reg.Equals(in_reg)) {
      LoadImmediate(out_reg.AsXRegister(), 0, eq);
    }
    AddConstant(out_reg.AsXRegister(), SP, handle_scope_offs.Int32Value(), ne);
  } else {
    AddConstant(out_reg.AsXRegister(), SP, handle_scope_offs.Int32Value(), al);
  }
}

void Arm64Assembler::CreateHandleScopeEntry(FrameOffset out_off, FrameOffset handle_scope_offset,
                                            ManagedRegister m_scratch, bool null_allowed) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  if (null_allowed) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), SP,
                    handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(reg_w(scratch.AsOverlappingWRegister()), 0);
    // TODO: Move this logic into AddConstant with flags.
    AddConstant(scratch.AsXRegister(), SP, handle_scope_offset.Int32Value(), ne);
  } else {
    AddConstant(scratch.AsXRegister(), SP, handle_scope_offset.Int32Value(), al);
  }
  StoreToOffset(scratch.AsXRegister(), SP, out_off.Int32Value());
}

void Arm64Assembler::LoadReferenceFromHandleScope(ManagedRegister m_out_reg,
                                                  ManagedRegister m_in_reg) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  CHECK(out_reg.IsXRegister()) << out_reg;
  CHECK(in_reg.IsXRegister()) << in_reg;
  vixl::aarch64::Label exit;
  if (!out_reg.Equals(in_reg)) {
    // FIXME: Who sets the flags here?
    LoadImmediate(out_reg.AsXRegister(), 0, eq);
  }
  ___ Cbz(reg_x(in_reg.AsXRegister()), &exit);
  LoadFromOffset(out_reg.AsXRegister(), in_reg.AsXRegister(), 0);
  ___ Bind(&exit);
}

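// Emits an exception poll: loads the pending-exception field from the thread register and, if it
// is non-null, branches to a slow-path block that is emitted later by FinalizeCode().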
void Arm64Assembler::ExceptionPoll(ManagedRegister m_scratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  exception_blocks_.emplace_back(new Arm64Exception(scratch, stack_adjust));
  LoadFromOffset(scratch.AsXRegister(), TR, Thread::ExceptionOffset<8>().Int32Value());
  ___ Cbnz(reg_x(scratch.AsXRegister()), exception_blocks_.back()->Entry());
}

void Arm64Assembler::EmitExceptionPoll(Arm64Exception *exception) {
  UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(exception->scratch_.AsXRegister()));
  Register temp = temps.AcquireX();

  // Bind exception poll entry.
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }
  // Pass exception object as argument.
  // Don't care about preserving X0 as this won't return.
  ___ Mov(reg_x(X0), reg_x(exception->scratch_.AsXRegister()));
  ___ Ldr(temp, MEM_OP(reg_x(TR), QUICK_ENTRYPOINT_OFFSET(8, pDeliverException).Int32Value()));

  ___ Blr(temp);
  // Call should never return.
  ___ Brk();
}

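// Maps a VIXL CPURegister to the DWARF register number used in the CFI, distinguishing core
// registers (X0 - X30) from FP registers.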
static inline dwarf::Reg DWARFReg(CPURegister reg) {
  if (reg.IsFPRegister()) {
    return dwarf::Reg::Arm64Fp(reg.GetCode());
  } else {
    DCHECK_LT(reg.GetCode(), 31u);  // X0 - X30.
    return dwarf::Reg::Arm64Core(reg.GetCode());
  }
}

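// Spills `registers` to the stack starting at `offset`, pairing them into stp where possible and
// recording a CFI entry for each slot; UnspillRegisters below mirrors this with ldp/ldr.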
void Arm64Assembler::SpillRegisters(CPURegList registers, int offset) {
  int size = registers.GetRegisterSizeInBytes();
  const Register sp = vixl_masm_->StackPointer();
  // Since we are operating on register pairs, we would like to align on
  // double the standard size; on the other hand, we don't want to insert
  // an extra store, which will happen if the number of registers is even.
  if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Str(dst0, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
    offset += size;
  }
  while (registers.GetCount() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Stp(dst0, dst1, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
    cfi_.RelOffset(DWARFReg(dst1), offset + size);
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Str(dst0, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
  }
  DCHECK(registers.IsEmpty());
}

void Arm64Assembler::UnspillRegisters(CPURegList registers, int offset) {
  int size = registers.GetRegisterSizeInBytes();
  const Register sp = vixl_masm_->StackPointer();
  // Be consistent with the logic for spilling registers.
  if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Ldr(dst0, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
    offset += size;
  }
  while (registers.GetCount() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Ldp(dst0, dst1, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
    cfi_.Restore(DWARFReg(dst1));
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Ldr(dst0, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
  }
  DCHECK(registers.IsEmpty());
}

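// Builds the managed frame: callee-saves are spilled at the top of the new frame, the ArtMethod*
// (expected in X0) is stored at SP, and any entry spills are written just above the frame.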
void Arm64Assembler::BuildFrame(size_t frame_size,
                                ManagedRegister method_reg,
                                ArrayRef<const ManagedRegister> callee_save_regs,
                                const ManagedRegisterEntrySpills& entry_spills) {
  // Setup VIXL CPURegList for callee-saves.
  CPURegList core_reg_list(CPURegister::kRegister, kXRegSize, 0);
  CPURegList fp_reg_list(CPURegister::kFPRegister, kDRegSize, 0);
  for (auto r : callee_save_regs) {
    Arm64ManagedRegister reg = r.AsArm64();
    if (reg.IsXRegister()) {
      core_reg_list.Combine(reg_x(reg.AsXRegister()).GetCode());
    } else {
      DCHECK(reg.IsDRegister());
      fp_reg_list.Combine(reg_d(reg.AsDRegister()).GetCode());
    }
  }
  size_t core_reg_size = core_reg_list.GetTotalSizeInBytes();
  size_t fp_reg_size = fp_reg_list.GetTotalSizeInBytes();

  // Increase frame to required size.
  DCHECK_ALIGNED(frame_size, kStackAlignment);
  DCHECK_GE(frame_size, core_reg_size + fp_reg_size + kArm64PointerSize);
  IncreaseFrameSize(frame_size);

  // Save callee-saves.
  SpillRegisters(core_reg_list, frame_size - core_reg_size);
  SpillRegisters(fp_reg_list, frame_size - core_reg_size - fp_reg_size);

  DCHECK(core_reg_list.IncludesAliasOf(reg_x(TR)));

  // Write ArtMethod*
  DCHECK(X0 == method_reg.AsArm64().AsXRegister());
  StoreToOffset(X0, SP, 0);

  // Write out entry spills
  int32_t offset = frame_size + kArm64PointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    Arm64ManagedRegister reg = entry_spills.at(i).AsArm64();
    if (reg.IsNoRegister()) {
      // only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsXRegister()) {
      StoreToOffset(reg.AsXRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsWRegister()) {
      StoreWToOffset(kStoreWord, reg.AsWRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      StoreDToOffset(reg.AsDRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsSRegister()) {
      StoreSToOffset(reg.AsSRegister(), SP, offset);
      offset += 4;
    }
  }
}

void Arm64Assembler::RemoveFrame(size_t frame_size,
                                 ArrayRef<const ManagedRegister> callee_save_regs) {
  // Setup VIXL CPURegList for callee-saves.
  CPURegList core_reg_list(CPURegister::kRegister, kXRegSize, 0);
  CPURegList fp_reg_list(CPURegister::kFPRegister, kDRegSize, 0);
  for (auto r : callee_save_regs) {
    Arm64ManagedRegister reg = r.AsArm64();
    if (reg.IsXRegister()) {
      core_reg_list.Combine(reg_x(reg.AsXRegister()).GetCode());
    } else {
      DCHECK(reg.IsDRegister());
      fp_reg_list.Combine(reg_d(reg.AsDRegister()).GetCode());
    }
  }
  size_t core_reg_size = core_reg_list.GetTotalSizeInBytes();
  size_t fp_reg_size = fp_reg_list.GetTotalSizeInBytes();

  // For now we only check that the size of the frame is large enough to hold spills and method
  // reference.
  DCHECK_GE(frame_size, core_reg_size + fp_reg_size + kArm64PointerSize);
  DCHECK_ALIGNED(frame_size, kStackAlignment);

  DCHECK(core_reg_list.IncludesAliasOf(reg_x(TR)));

  cfi_.RememberState();

  // Restore callee-saves.
  UnspillRegisters(core_reg_list, frame_size - core_reg_size);
  UnspillRegisters(fp_reg_list, frame_size - core_reg_size - fp_reg_size);

  // Decrease frame size to start of callee saved regs.
  DecreaseFrameSize(frame_size);

  // Pop callee saved and return to LR.
  ___ Ret();

  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}

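// Heap reference poisoning: references are kept in negated form. Negation is its own inverse, so
// poisoning and unpoisoning are the same Neg operation.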
void Arm64Assembler::PoisonHeapReference(Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, Operand(reg));
}

void Arm64Assembler::UnpoisonHeapReference(Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, Operand(reg));
}

void Arm64Assembler::MaybeUnpoisonHeapReference(Register reg) {
  if (kPoisonHeapReferences) {
    UnpoisonHeapReference(reg);
  }
}

#undef ___

}  // namespace arm64
}  // namespace art