/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm64.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {
namespace arm64 {

#ifdef ___
#error "ARM64 Assembler macro already defined."
#else
#define ___   vixl_masm_->
#endif

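// Emit the deferred exception poll slow paths, then let VIXL finalize the code buffer.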
void Arm64Assembler::FinalizeCode() {
  if (!exception_blocks_.empty()) {
    for (size_t i = 0; i < exception_blocks_.size(); i++) {
      EmitExceptionPoll(exception_blocks_.at(i));
    }
  }
  ___ FinalizeCode();
}

size_t Arm64Assembler::CodeSize() const {
  return vixl_masm_->BufferCapacity() - vixl_masm_->RemainingBufferSpace();
}

const uint8_t* Arm64Assembler::CodeBufferBaseAddress() const {
  return vixl_masm_->GetStartAddress<uint8_t*>();
}

void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) {
  // Copy the instructions from the buffer.
  MemoryRegion from(vixl_masm_->GetStartAddress<void*>(), CodeSize());
  region.CopyFrom(0, from);
}

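// Copy the thread register (TR) into the given managed register.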
void Arm64Assembler::GetCurrentThread(ManagedRegister tr) {
  ___ Mov(reg_x(tr.AsArm64().AsXRegister()), reg_x(TR));
}

void Arm64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister /* scratch */) {
  StoreToOffset(TR, SP, offset.Int32Value());
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, -adjust);
  cfi().AdjustCFAOffset(adjust);
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

void Arm64Assembler::AddConstant(XRegister rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}

void Arm64Assembler::AddConstant(XRegister rd, XRegister rn, int32_t value,
                                 Condition cond) {
  if ((cond == al) || (cond == nv)) {
    // VIXL macro-assembler handles all variants.
    ___ Add(reg_x(rd), reg_x(rn), value);
  } else {
    // temp = rn + value
    // rd = cond ? temp : rd
    vixl::UseScratchRegisterScope temps(vixl_masm_);
    temps.Exclude(reg_x(rd), reg_x(rn));
    vixl::Register temp = temps.AcquireX();
    ___ Add(temp, reg_x(rn), value);
    ___ Csel(reg_x(rd), temp, reg_x(rd), cond);
  }
}

void Arm64Assembler::StoreWToOffset(StoreOperandType type, WRegister source,
                                    XRegister base, int32_t offset) {
  switch (type) {
    case kStoreByte:
      ___ Strb(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreHalfword:
      ___ Strh(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreWord:
      ___ Str(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

void Arm64Assembler::StoreToOffset(XRegister source, XRegister base, int32_t offset) {
  CHECK_NE(source, SP);
  ___ Str(reg_x(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreSToOffset(SRegister source, XRegister base, int32_t offset) {
  ___ Str(reg_s(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreDToOffset(DRegister source, XRegister base, int32_t offset) {
  ___ Str(reg_d(source), MEM_OP(reg_x(base), offset));
}

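// Store a managed register of the given size to a stack slot, dispatching on the register kind (W/X/S/D).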
void Arm64Assembler::Store(FrameOffset offs, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister src = m_src.AsArm64();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsWRegister()) {
    CHECK_EQ(4u, size);
    StoreWToOffset(kStoreWord, src.AsWRegister(), SP, offs.Int32Value());
  } else if (src.IsXRegister()) {
    CHECK_EQ(8u, size);
    StoreToOffset(src.AsXRegister(), SP, offs.Int32Value());
  } else if (src.IsSRegister()) {
    StoreSToOffset(src.AsSRegister(), SP, offs.Int32Value());
  } else {
    CHECK(src.IsDRegister()) << src;
    StoreDToOffset(src.AsDRegister(), SP, offs.Int32Value());
  }
}

void Arm64Assembler::StoreRef(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsXRegister()) << src;
  StoreWToOffset(kStoreWord, src.AsOverlappingWRegister(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreRawPtr(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsXRegister()) << src;
  StoreToOffset(src.AsXRegister(), SP, offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToFrame(FrameOffset offs, uint32_t imm,
                                           ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadImmediate(scratch.AsXRegister(), imm);
  StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToThread64(ThreadOffset<8> offs, uint32_t imm,
                                              ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadImmediate(scratch.AsXRegister(), imm);
  StoreToOffset(scratch.AsXRegister(), TR, offs.Int32Value());
}

void Arm64Assembler::StoreStackOffsetToThread64(ThreadOffset<8> tr_offs,
                                                FrameOffset fr_offs,
                                                ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  AddConstant(scratch.AsXRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
}

void Arm64Assembler::StoreStackPointerToThread64(ThreadOffset<8> tr_offs) {
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  vixl::Register temp = temps.AcquireX();
  ___ Mov(temp, reg_x(SP));
  ___ Str(temp, MEM_OP(reg_x(TR), tr_offs.Int32Value()));
}

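// Store source at dest_off and copy the stack slot at in_off into the adjacent slot at dest_off + 8.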
void Arm64Assembler::StoreSpanning(FrameOffset dest_off, ManagedRegister m_source,
                                   FrameOffset in_off, ManagedRegister m_scratch) {
  Arm64ManagedRegister source = m_source.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  StoreToOffset(source.AsXRegister(), SP, dest_off.Int32Value());
  LoadFromOffset(scratch.AsXRegister(), SP, in_off.Int32Value());
  StoreToOffset(scratch.AsXRegister(), SP, dest_off.Int32Value() + 8);
}

// Load routines.
void Arm64Assembler::LoadImmediate(XRegister dest, int32_t value,
                                   Condition cond) {
  if ((cond == al) || (cond == nv)) {
    ___ Mov(reg_x(dest), value);
  } else {
    // temp = value
    // dest = cond ? temp : dest
    if (value != 0) {
      vixl::UseScratchRegisterScope temps(vixl_masm_);
      temps.Exclude(reg_x(dest));
      vixl::Register temp = temps.AcquireX();
      ___ Mov(temp, value);
      ___ Csel(reg_x(dest), temp, reg_x(dest), cond);
    } else {
      ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), cond);
    }
  }
}

void Arm64Assembler::LoadWFromOffset(LoadOperandType type, WRegister dest,
                                     XRegister base, int32_t offset) {
  switch (type) {
    case kLoadSignedByte:
      ___ Ldrsb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadSignedHalfword:
      ___ Ldrsh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedByte:
      ___ Ldrb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedHalfword:
      ___ Ldrh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadWord:
      ___ Ldr(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

// Note: We can extend this member by adding load type info - see
// sign extended A64 load variants.
void Arm64Assembler::LoadFromOffset(XRegister dest, XRegister base,
                                    int32_t offset) {
  CHECK_NE(dest, SP);
  ___ Ldr(reg_x(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadSFromOffset(SRegister dest, XRegister base,
                                     int32_t offset) {
  ___ Ldr(reg_s(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadDFromOffset(DRegister dest, XRegister base,
                                     int32_t offset) {
  ___ Ldr(reg_d(dest), MEM_OP(reg_x(base), offset));
}

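// Common load helper: dispatch on the destination register kind (W/X/S/D) and load size bytes from base + offset.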
void Arm64Assembler::Load(Arm64ManagedRegister dest, XRegister base,
                          int32_t offset, size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsWRegister()) {
    CHECK_EQ(4u, size) << dest;
    ___ Ldr(reg_w(dest.AsWRegister()), MEM_OP(reg_x(base), offset));
  } else if (dest.IsXRegister()) {
    CHECK_NE(dest.AsXRegister(), SP) << dest;
    if (size == 4u) {
      ___ Ldr(reg_w(dest.AsOverlappingWRegister()), MEM_OP(reg_x(base), offset));
    } else {
      CHECK_EQ(8u, size) << dest;
      ___ Ldr(reg_x(dest.AsXRegister()), MEM_OP(reg_x(base), offset));
    }
  } else if (dest.IsSRegister()) {
    ___ Ldr(reg_s(dest.AsSRegister()), MEM_OP(reg_x(base), offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Ldr(reg_d(dest.AsDRegister()), MEM_OP(reg_x(base), offset));
  }
}

void Arm64Assembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm64(), SP, src.Int32Value(), size);
}

void Arm64Assembler::LoadFromThread64(ManagedRegister m_dst, ThreadOffset<8> src, size_t size) {
  return Load(m_dst.AsArm64(), TR, src.Int32Value(), size);
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, FrameOffset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsXRegister()) << dst;
  LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), SP, offs.Int32Value());
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, ManagedRegister m_base, MemberOffset offs,
                             bool unpoison_reference) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsXRegister() && base.IsXRegister());
  LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), base.AsXRegister(),
                  offs.Int32Value());
  if (unpoison_reference) {
    WRegister ref_reg = dst.AsOverlappingWRegister();
    MaybeUnpoisonHeapReference(reg_w(ref_reg));
  }
}

void Arm64Assembler::LoadRawPtr(ManagedRegister m_dst, ManagedRegister m_base, Offset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsXRegister() && base.IsXRegister());
  // Remove dst and base from the temp list - higher level API uses IP1, IP0.
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(dst.AsXRegister()), reg_x(base.AsXRegister()));
  ___ Ldr(reg_x(dst.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
}

void Arm64Assembler::LoadRawPtrFromThread64(ManagedRegister m_dst, ThreadOffset<8> offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsXRegister()) << dst;
  LoadFromOffset(dst.AsXRegister(), TR, offs.Int32Value());
}

// Copying routines.
void Arm64Assembler::Move(ManagedRegister m_dst, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  if (!dst.Equals(src)) {
    if (dst.IsXRegister()) {
      if (size == 4) {
        CHECK(src.IsWRegister());
        ___ Mov(reg_w(dst.AsOverlappingWRegister()), reg_w(src.AsWRegister()));
      } else {
        if (src.IsXRegister()) {
          ___ Mov(reg_x(dst.AsXRegister()), reg_x(src.AsXRegister()));
        } else {
          ___ Mov(reg_x(dst.AsXRegister()), reg_x(src.AsOverlappingXRegister()));
        }
      }
    } else if (dst.IsWRegister()) {
      CHECK(src.IsWRegister()) << src;
      ___ Mov(reg_w(dst.AsWRegister()), reg_w(src.AsWRegister()));
    } else if (dst.IsSRegister()) {
      CHECK(src.IsSRegister()) << src;
      ___ Fmov(reg_s(dst.AsSRegister()), reg_s(src.AsSRegister()));
    } else {
      CHECK(dst.IsDRegister()) << dst;
      CHECK(src.IsDRegister()) << src;
      ___ Fmov(reg_d(dst.AsDRegister()), reg_d(src.AsDRegister()));
    }
  }
}

void Arm64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
                                            ThreadOffset<8> tr_offs,
                                            ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadFromOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
  StoreToOffset(scratch.AsXRegister(), SP, fr_offs.Int32Value());
}

void Arm64Assembler::CopyRawPtrToThread64(ThreadOffset<8> tr_offs,
                                          FrameOffset fr_offs,
                                          ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadFromOffset(scratch.AsXRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
}

void Arm64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                             ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(),
                  SP, src.Int32Value());
  StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(),
                 SP, dest.Int32Value());
}

void Arm64Assembler::Copy(FrameOffset dest, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = src_base.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), base.AsXRegister(),
                    src_offset.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), base.AsXRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(ManagedRegister m_dest_base, Offset dest_offs, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = m_dest_base.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), base.AsXRegister(),
                   dest_offs.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsXRegister(), base.AsXRegister(), dest_offs.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, FrameOffset /*src_base*/, Offset /*src_offset*/,
                          ManagedRegister /*mscratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

void Arm64Assembler::Copy(ManagedRegister m_dest, Offset dest_offset,
                          ManagedRegister m_src, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  Arm64ManagedRegister dest = m_dest.AsArm64();
  CHECK(dest.IsXRegister()) << dest;
  CHECK(src.IsXRegister()) << src;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    if (scratch.IsWRegister()) {
      LoadWFromOffset(kLoadWord, scratch.AsWRegister(), src.AsXRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsWRegister(), dest.AsXRegister(),
                     dest_offset.Int32Value());
    } else {
      LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), src.AsXRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), dest.AsXRegister(),
                     dest_offset.Int32Value());
    }
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), src.AsXRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsXRegister(), dest.AsXRegister(), dest_offset.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, Offset /*dest_offset*/,
                          FrameOffset /*src*/, Offset /*src_offset*/,
                          ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

void Arm64Assembler::MemoryBarrier(ManagedRegister m_scratch ATTRIBUTE_UNUSED) {
  // TODO: Should we check that m_scratch is IP? - see arm.
  ___ Dmb(vixl::InnerShareable, vixl::BarrierAll);
}

void Arm64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ Sxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ Sxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ Uxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ Uxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void Arm64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void Arm64Assembler::Call(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister()) << scratch;
  LoadFromOffset(scratch.AsXRegister(), base.AsXRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsXRegister()));
}

void Arm64Assembler::JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister()) << scratch;
  // Remove base and scratch from the temp list - higher level API uses IP1, IP0.
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(base.AsXRegister()), reg_x(scratch.AsXRegister()));
  ___ Ldr(reg_x(scratch.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
  ___ Br(reg_x(scratch.AsXRegister()));
}

void Arm64Assembler::Call(FrameOffset base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  // Call *(*(SP + base) + offset)
  LoadFromOffset(scratch.AsXRegister(), SP, base.Int32Value());
  LoadFromOffset(scratch.AsXRegister(), scratch.AsXRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsXRegister()));
}

void Arm64Assembler::CallFromThread64(ThreadOffset<8> /*offset*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Call() variant";
}

void Arm64Assembler::CreateHandleScopeEntry(
    ManagedRegister m_out_reg, FrameOffset handle_scope_offs, ManagedRegister m_in_reg,
    bool null_allowed) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  // For now we only hold stale handle scope entries in x registers.
  CHECK(in_reg.IsNoRegister() || in_reg.IsXRegister()) << in_reg;
  CHECK(out_reg.IsXRegister()) << out_reg;
  if (null_allowed) {
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      LoadWFromOffset(kLoadWord, out_reg.AsOverlappingWRegister(), SP,
                      handle_scope_offs.Int32Value());
      in_reg = out_reg;
    }
    ___ Cmp(reg_w(in_reg.AsOverlappingWRegister()), 0);
    if (!out_reg.Equals(in_reg)) {
      LoadImmediate(out_reg.AsXRegister(), 0, eq);
    }
    AddConstant(out_reg.AsXRegister(), SP, handle_scope_offs.Int32Value(), ne);
  } else {
    AddConstant(out_reg.AsXRegister(), SP, handle_scope_offs.Int32Value(), al);
  }
}

void Arm64Assembler::CreateHandleScopeEntry(FrameOffset out_off, FrameOffset handle_scope_offset,
                                            ManagedRegister m_scratch, bool null_allowed) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  if (null_allowed) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), SP,
                    handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(reg_w(scratch.AsOverlappingWRegister()), 0);
    // TODO: Move this logic into an AddConstant() variant that sets flags.
    AddConstant(scratch.AsXRegister(), SP, handle_scope_offset.Int32Value(), ne);
  } else {
    AddConstant(scratch.AsXRegister(), SP, handle_scope_offset.Int32Value(), al);
  }
  StoreToOffset(scratch.AsXRegister(), SP, out_off.Int32Value());
}

void Arm64Assembler::LoadReferenceFromHandleScope(ManagedRegister m_out_reg,
                                                  ManagedRegister m_in_reg) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  CHECK(out_reg.IsXRegister()) << out_reg;
  CHECK(in_reg.IsXRegister()) << in_reg;
  vixl::Label exit;
  if (!out_reg.Equals(in_reg)) {
    // FIXME: Who sets the flags here?
    LoadImmediate(out_reg.AsXRegister(), 0, eq);
  }
  ___ Cbz(reg_x(in_reg.AsXRegister()), &exit);
  LoadFromOffset(out_reg.AsXRegister(), in_reg.AsXRegister(), 0);
  ___ Bind(&exit);
}

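// Poll for a pending exception: load the Thread's exception field and, if it is non-null,
// branch to a slow path that is emitted later by FinalizeCode().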
void Arm64Assembler::ExceptionPoll(ManagedRegister m_scratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64Exception *current_exception = new Arm64Exception(scratch, stack_adjust);
  exception_blocks_.push_back(current_exception);
  LoadFromOffset(scratch.AsXRegister(), TR, Thread::ExceptionOffset<8>().Int32Value());
  ___ Cbnz(reg_x(scratch.AsXRegister()), current_exception->Entry());
}

void Arm64Assembler::EmitExceptionPoll(Arm64Exception *exception) {
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(exception->scratch_.AsXRegister()));
  vixl::Register temp = temps.AcquireX();

  // Bind exception poll entry.
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }
  // Pass exception object as argument.
  // Don't care about preserving X0 as this won't return.
  ___ Mov(reg_x(X0), reg_x(exception->scratch_.AsXRegister()));
  ___ Ldr(temp, MEM_OP(reg_x(TR), QUICK_ENTRYPOINT_OFFSET(8, pDeliverException).Int32Value()));

  ___ Blr(temp);
  // Call should never return.
  ___ Brk();
}

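// Map a VIXL CPURegister to the DWARF register number used by the CFI stream.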
static inline dwarf::Reg DWARFReg(CPURegister reg) {
  if (reg.IsFPRegister()) {
    return dwarf::Reg::Arm64Fp(reg.code());
  } else {
    DCHECK_LT(reg.code(), 31u);  // X0 - X30.
    return dwarf::Reg::Arm64Core(reg.code());
  }
}

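// Spill the given registers to the stack starting at offset, pairing stores into STP where
// possible and recording a CFI rel-offset for each spilled register.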
void Arm64Assembler::SpillRegisters(vixl::CPURegList registers, int offset) {
  int size = registers.RegisterSizeInBytes();
  const Register sp = vixl_masm_->StackPointer();
  while (registers.Count() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Stp(dst0, dst1, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
    cfi_.RelOffset(DWARFReg(dst1), offset + size);
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Str(dst0, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
  }
  DCHECK(registers.IsEmpty());
}

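// Reload the given registers from the stack starting at offset, pairing loads into LDP where
// possible and marking each register as restored in the CFI.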
void Arm64Assembler::UnspillRegisters(vixl::CPURegList registers, int offset) {
  int size = registers.RegisterSizeInBytes();
  const Register sp = vixl_masm_->StackPointer();
  while (registers.Count() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Ldp(dst0, dst1, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
    cfi_.Restore(DWARFReg(dst1));
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Ldr(dst0, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
  }
  DCHECK(registers.IsEmpty());
}

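// Build the managed frame: grow the stack, spill the callee-saves, store the ArtMethod* at SP[0],
// then write the entry spills to the slots above the frame.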
void Arm64Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                                const std::vector<ManagedRegister>& callee_save_regs,
                                const ManagedRegisterEntrySpills& entry_spills) {
  // Setup VIXL CPURegList for callee-saves.
  CPURegList core_reg_list(CPURegister::kRegister, kXRegSize, 0);
  CPURegList fp_reg_list(CPURegister::kFPRegister, kDRegSize, 0);
  for (auto r : callee_save_regs) {
    Arm64ManagedRegister reg = r.AsArm64();
    if (reg.IsXRegister()) {
      core_reg_list.Combine(reg_x(reg.AsXRegister()).code());
    } else {
      DCHECK(reg.IsDRegister());
      fp_reg_list.Combine(reg_d(reg.AsDRegister()).code());
    }
  }
  size_t core_reg_size = core_reg_list.TotalSizeInBytes();
  size_t fp_reg_size = fp_reg_list.TotalSizeInBytes();

  // Increase frame to required size.
  DCHECK_ALIGNED(frame_size, kStackAlignment);
  DCHECK_GE(frame_size, core_reg_size + fp_reg_size + kArm64PointerSize);
  IncreaseFrameSize(frame_size);

  // Save callee-saves.
  SpillRegisters(core_reg_list, frame_size - core_reg_size);
  SpillRegisters(fp_reg_list, frame_size - core_reg_size - fp_reg_size);

  DCHECK(core_reg_list.IncludesAliasOf(reg_x(TR)));

  // Write ArtMethod*
  DCHECK(X0 == method_reg.AsArm64().AsXRegister());
  StoreToOffset(X0, SP, 0);

  // Write out entry spills
  int32_t offset = frame_size + kArm64PointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    Arm64ManagedRegister reg = entry_spills.at(i).AsArm64();
    if (reg.IsNoRegister()) {
      // only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsXRegister()) {
      StoreToOffset(reg.AsXRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsWRegister()) {
      StoreWToOffset(kStoreWord, reg.AsWRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      StoreDToOffset(reg.AsDRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsSRegister()) {
      StoreSToOffset(reg.AsSRegister(), SP, offset);
      offset += 4;
    }
  }
}

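// Tear down the frame built by BuildFrame: restore callee-saves, drop the frame, and return to LR,
// restoring the CFI state for any code that follows.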
void Arm64Assembler::RemoveFrame(size_t frame_size,
                                 const std::vector<ManagedRegister>& callee_save_regs) {
  // Setup VIXL CPURegList for callee-saves.
  CPURegList core_reg_list(CPURegister::kRegister, kXRegSize, 0);
  CPURegList fp_reg_list(CPURegister::kFPRegister, kDRegSize, 0);
  for (auto r : callee_save_regs) {
    Arm64ManagedRegister reg = r.AsArm64();
    if (reg.IsXRegister()) {
      core_reg_list.Combine(reg_x(reg.AsXRegister()).code());
    } else {
      DCHECK(reg.IsDRegister());
      fp_reg_list.Combine(reg_d(reg.AsDRegister()).code());
    }
  }
  size_t core_reg_size = core_reg_list.TotalSizeInBytes();
  size_t fp_reg_size = fp_reg_list.TotalSizeInBytes();

  // For now we only check that the size of the frame is large enough to hold spills and method
  // reference.
  DCHECK_GE(frame_size, core_reg_size + fp_reg_size + kArm64PointerSize);
  DCHECK_ALIGNED(frame_size, kStackAlignment);

  DCHECK(core_reg_list.IncludesAliasOf(reg_x(TR)));

  cfi_.RememberState();

  // Restore callee-saves.
  UnspillRegisters(core_reg_list, frame_size - core_reg_size);
  UnspillRegisters(fp_reg_list, frame_size - core_reg_size - fp_reg_size);

  // Decrease frame size to start of callee saved regs.
  DecreaseFrameSize(frame_size);

  // Pop callee saved and return to LR.
  ___ Ret();

  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}

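// Heap reference poisoning stores the negation of the reference value, so both poisoning and
// unpoisoning are a NEG of the W register.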
void Arm64Assembler::PoisonHeapReference(vixl::Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, vixl::Operand(reg));
}

void Arm64Assembler::UnpoisonHeapReference(vixl::Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, vixl::Operand(reg));
}

void Arm64Assembler::MaybeUnpoisonHeapReference(vixl::Register reg) {
  if (kPoisonHeapReferences) {
    UnpoisonHeapReference(reg);
  }
}

#undef ___

}  // namespace arm64
}  // namespace art