/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data for a small num_entries.
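// For example, at the threshold num_entries == 7, the compare/jump sequence costs roughly
// 1.5 * 7 + 3 = 13.5 instructions, while the jump table costs 7 instructions plus 7 32-bit
// literals (14 words in total), so 7 is approximately the break-even point.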
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()
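// Note: QUICK_ENTRY_POINT(x) yields the byte offset of the quick entrypoint `x` relative to
// the current Thread pointer (via QUICK_ENTRYPOINT_OFFSET), for Thread-relative entrypoint
// loads.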

// Calculate the memory operands needed to save/restore live registers.
141static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
Vladimir Marko804b03f2016-09-14 16:26:36 +0100142 LocationSummary* locations,
Zheng Xuda403092015-04-24 17:35:39 +0800143 int64_t spill_offset,
144 bool is_save) {
Vladimir Marko804b03f2016-09-14 16:26:36 +0100145 const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
146 const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
147 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
Zheng Xuda403092015-04-24 17:35:39 +0800148 codegen->GetNumberOfCoreRegisters(),
Vladimir Marko804b03f2016-09-14 16:26:36 +0100149 fp_spills,
Zheng Xuda403092015-04-24 17:35:39 +0800150 codegen->GetNumberOfFloatingPointRegisters()));
151
Vladimir Marko804b03f2016-09-14 16:26:36 +0100152 CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
153 CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize, fp_spills);
Zheng Xuda403092015-04-24 17:35:39 +0800154
155 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
156 UseScratchRegisterScope temps(masm);
157
158 Register base = masm->StackPointer();
Scott Wakeling97c72b72016-06-24 16:19:36 +0100159 int64_t core_spill_size = core_list.GetTotalSizeInBytes();
160 int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
Zheng Xuda403092015-04-24 17:35:39 +0800161 int64_t reg_size = kXRegSizeInBytes;
162 int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
163 uint32_t ls_access_size = WhichPowerOf2(reg_size);
Scott Wakeling97c72b72016-06-24 16:19:36 +0100164 if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
Zheng Xuda403092015-04-24 17:35:39 +0800165 !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the base of the floating point registers' spill area).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
      DCHECK(out.IsValid());
      UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
      Register temp = temps.AcquireX();
      const DexFile& dex_file = cls_->GetDexFile();
      // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
      // kSaveEverything and use a temporary for the ADRP in the fast path, so that we
      // can avoid the ADRP here.
      vixl::aarch64::Label* adrp_label =
          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index);
      arm64_codegen->EmitAdrpPlaceholder(adrp_label, temp);
      vixl::aarch64::Label* strp_label =
          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
      {
        SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
        __ Bind(strp_label);
        __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
               MemOperand(temp, /* offset placeholder */ 0));
      }
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadStringSlowPathARM64(HLoadString* instruction, Register temp, vixl::aarch64::Label* adrp_label)
      : SlowPathCodeARM64(instruction),
        temp_(temp),
        adrp_label_(adrp_label) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // temp_ is a scratch register. Make sure it's not used for saving/restoring registers.
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    temps.Exclude(temp_);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    const DexFile& dex_file = instruction_->AsLoadString()->GetDexFile();
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // The string entry page address was preserved in temp_ thanks to kSaveEverything.
    } else {
      // For non-Baker read barrier, we need to re-calculate the address of the string entry page.
      adrp_label_ = arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index);
      arm64_codegen->EmitAdrpPlaceholder(adrp_label_, temp_);
    }
    vixl::aarch64::Label* strp_label =
        arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label_);
    {
      SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
      __ Bind(strp_label);
      __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
             MemOperand(temp_, /* offset placeholder */ 0));
    }

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  const Register temp_;
  vixl::aarch64::Label* adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

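// Emits the jump table as a sequence of 32-bit literals, each holding the signed distance
// (in bytes) from the start of the table to the corresponding successor's bound label.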
void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  vixl::CodeBufferCheckScope scope(codegen->GetVIXLAssembler(),
                                   num_entries * sizeof(int32_t),
                                   vixl::CodeBufferCheckScope::kReserveBufferSpace,
                                   vixl::CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathARM64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
// If `entrypoint` is a valid location, it is assumed to already hold the entrypoint. The only
// case where the entrypoint is passed in is the GcRoot read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction,
                               Location ref,
                               Location entrypoint = Location::NoLocation())
      : SlowPathCodeARM64(instruction),
        ref_(ref),
        entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathARM64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathARM64(HInstruction* instruction,
                                             Location ref,
                                             Register obj,
                                             Location field_offset,
                                             Register temp)
      : SlowPathCodeARM64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = WRegisterFrom(ref_);
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use IP to save the old reference, as IP is
    // used internally by the ReadBarrierMarkRegX entry point, and we
    // need the old reference after the call to that entry point.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    __ Mov(temp_.W(), ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LDXR/CMP/BNE sequence of instructions in the compare-and-set
    // (CAS) operation below would abort the CAS, leaving the field
    // as-is.
    vixl::aarch64::Label done;
    __ Cmp(temp_.W(), ref_reg);
    __ B(eq, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);

    // Convenience aliases.
    Register base = obj_.W();
    Register offset = XRegisterFrom(field_offset_);
    Register expected = temp_.W();
    Register value = ref_reg;
    Register tmp_ptr = temps.AcquireX();    // Pointer to actual memory.
    Register tmp_value = temps.AcquireW();  // Value in memory.

    __ Add(tmp_ptr, base.X(), Operand(offset));

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not poison `value`, as it is the same register as
        // `expected`, which has just been poisoned.
      } else {
        arm64_codegen->GetAssembler()->PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp_value = [tmp_ptr] - expected;
    // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
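    //
    // Stxr writes a status result into `tmp_value`: 0 if the store succeeded, 1 if the
    // exclusive monitor was lost; the Cbnz below therefore retries until the store
    // succeeds, while a failed comparison branches out and drops the exclusive access
    // with Clrex.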

    vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&comparison_failed, ne);
    __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_value, &loop_head);
    __ B(&exit_loop);
    __ Bind(&comparison_failed);
    __ Clrex();
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not unpoison `value`, as it is the same register as
        // `expected`, which has just been unpoisoned.
      } else {
        arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0u);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001119 Location next_location;
1120 if (type == Primitive::kPrimVoid) {
1121 LOG(FATAL) << "Unreachable type " << type;
1122 }
1123
Alexandre Rames542361f2015-01-29 16:57:31 +00001124 if (Primitive::IsFloatingPointType(type) &&
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001125 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
1126 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
Alexandre Rames542361f2015-01-29 16:57:31 +00001127 } else if (!Primitive::IsFloatingPointType(type) &&
1128 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001129 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
1130 } else {
1131 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Alexandre Rames542361f2015-01-29 16:57:31 +00001132 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
1133 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +01001134 }
1135
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001136 // Space on the stack is reserved for all arguments.
Alexandre Rames542361f2015-01-29 16:57:31 +00001137 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
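  // For example (a sketch following the logic above): for a signature
  // (int, double, long), the int takes the first core argument register,
  // the double the first FP argument register, and the long the second
  // core argument register, while stack_index_ still advances by
  // 1 + 2 + 2 = 5 slots, since stack space is reserved for every argument.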
Alexandre Rames5319def2014-10-23 10:03:10 +01001138 return next_location;
1139}
1140
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001141Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +01001142 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001143}
1144
Serban Constantinescu579885a2015-02-22 20:51:33 +00001145CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
1146 const Arm64InstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +01001147 const CompilerOptions& compiler_options,
1148 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +01001149 : CodeGenerator(graph,
1150 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001151 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +00001152 kNumberOfAllocatableRegisterPairs,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001153 callee_saved_core_registers.GetList(),
1154 callee_saved_fp_registers.GetList(),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001155 compiler_options,
1156 stats),
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001157 block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Zheng Xu3927c8b2015-11-18 17:46:25 +08001158 jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +01001159 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +00001160 instruction_visitor_(graph, this),
Serban Constantinescu579885a2015-02-22 20:51:33 +00001161 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001162 assembler_(graph->GetArena()),
Vladimir Marko58155012015-08-19 12:49:41 +00001163 isa_features_(isa_features),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001164 uint32_literals_(std::less<uint32_t>(),
1165 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +01001166 uint64_literals_(std::less<uint64_t>(),
1167 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001168 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1169 boot_image_string_patches_(StringReferenceValueComparator(),
1170 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1171 pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001172 boot_image_type_patches_(TypeReferenceValueComparator(),
1173 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1174 pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko1998cd02017-01-13 13:02:58 +00001175 type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001176 boot_image_address_patches_(std::less<uint32_t>(),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00001177 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1178 jit_string_patches_(StringReferenceValueComparator(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00001179 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1180 jit_class_patches_(TypeReferenceValueComparator(),
1181 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001182 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001183 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001184}
Alexandre Rames5319def2014-10-23 10:03:10 +01001185
Alexandre Rames67555f72014-11-18 10:55:16 +00001186#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +01001187
Zheng Xu3927c8b2015-11-18 17:46:25 +08001188void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001189 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001190 jump_table->EmitTable(this);
1191 }
1192}
1193
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001194void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001195 EmitJumpTables();
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001196 // Ensure we emit the literal pool.
1197 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +00001198
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001199 CodeGenerator::Finalize(allocator);
1200}
1201
Zheng Xuad4450e2015-04-17 18:48:56 +08001202void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1203 // Note: There are 6 kinds of moves:
1204 // 1. constant -> GPR/FPR (non-cycle)
1205 // 2. constant -> stack (non-cycle)
1206 // 3. GPR/FPR -> GPR/FPR
1207 // 4. GPR/FPR -> stack
1208 // 5. stack -> GPR/FPR
1209 // 6. stack -> stack (non-cycle)
1210 // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
1211 // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
1212 // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR available as VIXL temps to
1213 // resolve the dependency.
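  // For example (a hypothetical cycle of kind 3): the swap {w0 -> w1,
  // w1 -> w0} is resolved by routing one side through the VIXL GPR temp;
  // a mixed GPR/FPR cycle needs at most the one GPR and one FPR temp
  // noted above.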
1214 vixl_temps_.Open(GetVIXLAssembler());
1215}
1216
1217void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1218 vixl_temps_.Close();
1219}
1220
1221Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
1222 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
1223 kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
1224 kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
1225 Location scratch = GetScratchLocation(kind);
1226 if (!scratch.Equals(Location::NoLocation())) {
1227 return scratch;
1228 }
1229 // Allocate from VIXL temp registers.
1230 if (kind == Location::kRegister) {
1231 scratch = LocationFrom(vixl_temps_.AcquireX());
1232 } else {
1233 DCHECK(kind == Location::kFpuRegister);
1234 scratch = LocationFrom(vixl_temps_.AcquireD());
1235 }
1236 AddScratchLocation(scratch);
1237 return scratch;
1238}
1239
1240void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1241 if (loc.IsRegister()) {
1242 vixl_temps_.Release(XRegisterFrom(loc));
1243 } else {
1244 DCHECK(loc.IsFpuRegister());
1245 vixl_temps_.Release(DRegisterFrom(loc));
1246 }
1247 RemoveScratchLocation(loc);
1248}
1249
Alexandre Rames3e69f162014-12-10 10:36:50 +00001250void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001251 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +01001252 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001253}
1254
Alexandre Rames5319def2014-10-23 10:03:10 +01001255void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001256 MacroAssembler* masm = GetVIXLAssembler();
1257 BlockPoolsScope block_pools(masm);
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001258 __ Bind(&frame_entry_label_);
1259
Serban Constantinescu02164b32014-11-13 14:05:07 +00001260 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1261 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001262 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001263 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001264 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001265 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001266 __ Ldr(wzr, MemOperand(temp, 0));
1267 RecordPcInfo(nullptr, 0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001268 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001269
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001270 if (!HasEmptyFrame()) {
1271 int frame_size = GetFrameSize();
1272 // Stack layout:
1273 // sp[frame_size - 8] : lr.
1274 // ... : other preserved core registers.
1275 // ... : other preserved fp registers.
1276 // ... : reserved frame space.
1277 // sp[0] : current method.
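    //
    // Illustrative numbers only (a layout sketch, not from the original
    // comments): with a 96-byte frame and (x29, lr) as the only
    // preserved core registers, lr would sit at sp[88], x29 at sp[80],
    // and the ArtMethod* at sp[0]; everything in between is spill slots
    // and outgoing-argument space.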
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001278
1279 // Save the current method if we need it. Note that we do not
1280 // do this in HCurrentMethod, as the instruction might have been removed
1281 // in the SSA graph.
1282 if (RequiresCurrentMethod()) {
1283 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001284 } else {
1285 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001286 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001287 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001288 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1289 frame_size - GetCoreSpillSize());
1290 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1291 frame_size - FrameEntrySpillSize());
Mingyao Yang063fc772016-08-02 11:02:54 -07001292
1293 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1294 // Initialize should_deoptimize flag to 0.
1295 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1296 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1297 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001298 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001299}
1300
1301void CodeGeneratorARM64::GenerateFrameExit() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001302 BlockPoolsScope block_pools(GetVIXLAssembler());
David Srbeckyc34dc932015-04-12 09:27:43 +01001303 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001304 if (!HasEmptyFrame()) {
1305 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001306 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1307 frame_size - FrameEntrySpillSize());
1308 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1309 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001310 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001311 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001312 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001313 __ Ret();
1314 GetAssembler()->cfi().RestoreState();
1315 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001316}
1317
Scott Wakeling97c72b72016-06-24 16:19:36 +01001318CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001319 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001320 return CPURegList(CPURegister::kRegister, kXRegSize,
1321 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001322}
1323
Scott Wakeling97c72b72016-06-24 16:19:36 +01001324CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001325 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1326 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001327 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1328 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001329}
1330
Alexandre Rames5319def2014-10-23 10:03:10 +01001331void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1332 __ Bind(GetLabelOf(block));
1333}
1334
Calin Juravle175dc732015-08-25 15:42:32 +01001335void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1336 DCHECK(location.IsRegister());
1337 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1338}
1339
Calin Juravlee460d1d2015-09-29 04:52:17 +01001340void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1341 if (location.IsRegister()) {
1342 locations->AddTemp(location);
1343 } else {
1344 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1345 }
1346}
1347
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001348void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001349 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001350 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001351 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001352 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001353 if (value_can_be_null) {
1354 __ Cbz(value, &done);
1355 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001356 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001357 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001358 __ Strb(card, MemOperand(card, temp.X()));
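  // Sketch of the card-table arithmetic above (assuming the standard
  // ART card-table layout):
  //   card = *(tr + card_table_offset)  // biased card table base
  //   temp = object >> kCardShift       // card index for `object`
  //   *(card + temp) = card<7:0>        // the low byte of the biased
  //                                     // base is the dirty value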
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001359 if (value_can_be_null) {
1360 __ Bind(&done);
1361 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001362}
1363
David Brazdil58282f42016-01-14 12:45:10 +00001364void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001365 // Blocked core registers:
1366 // lr : Runtime reserved.
1367 // tr : Runtime reserved.
1368 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1369 // ip1 : VIXL core temp.
1370 // ip0 : VIXL core temp.
1371 //
1372 // Blocked fp registers:
1373 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001374 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1375 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001376 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001377 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001378 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001379
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001380 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001381 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001382 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001383 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001384
David Brazdil58282f42016-01-14 12:45:10 +00001385 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001386 // Stubs do not save callee-save floating point registers. If the graph
1387 // is debuggable, we need to deal with these registers differently. For
1388 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001389 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1390 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001391 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001392 }
1393 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001394}
1395
Alexandre Rames3e69f162014-12-10 10:36:50 +00001396size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1397 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1398 __ Str(reg, MemOperand(sp, stack_index));
1399 return kArm64WordSize;
1400}
1401
1402size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1403 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1404 __ Ldr(reg, MemOperand(sp, stack_index));
1405 return kArm64WordSize;
1406}
1407
1408size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1409 FPRegister reg = FPRegister(reg_id, kDRegSize);
1410 __ Str(reg, MemOperand(sp, stack_index));
1411 return kArm64WordSize;
1412}
1413
1414size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1415 FPRegister reg = FPRegister(reg_id, kDRegSize);
1416 __ Ldr(reg, MemOperand(sp, stack_index));
1417 return kArm64WordSize;
1418}
1419
Alexandre Rames5319def2014-10-23 10:03:10 +01001420void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001421 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001422}
1423
1424void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001425 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001426}
1427
Alexandre Rames67555f72014-11-18 10:55:16 +00001428void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001429 if (constant->IsIntConstant()) {
1430 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1431 } else if (constant->IsLongConstant()) {
1432 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1433 } else if (constant->IsNullConstant()) {
1434 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001435 } else if (constant->IsFloatConstant()) {
1436 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1437 } else {
1438 DCHECK(constant->IsDoubleConstant());
1439 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1440 }
1441}
1442
Alexandre Rames3e69f162014-12-10 10:36:50 +00001443
1444static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1445 DCHECK(constant.IsConstant());
1446 HConstant* cst = constant.GetConstant();
1447 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001448 // Null is mapped to a core W register, which we associate with kPrimInt.
1449 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001450 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1451 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1452 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1453}
1454
Calin Juravlee460d1d2015-09-29 04:52:17 +01001455void CodeGeneratorARM64::MoveLocation(Location destination,
1456 Location source,
1457 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001458 if (source.Equals(destination)) {
1459 return;
1460 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001461
1462 // A valid move can always be inferred from the destination and source
1463 // locations. When moving from and to a register, the argument type can be
1464 // used to generate 32-bit instead of 64-bit moves. In debug mode we also
1465 // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001466 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
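  // For example (following the inference rules below): a stack slot
  // moved into a core register with an unspecified type is handled as a
  // 32-bit integer move, a double stack slot as a 64-bit move, and the
  // DCHECKs reject incoherent pairs such as an int constant destined
  // for an FP register.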
Alexandre Rames3e69f162014-12-10 10:36:50 +00001467
1468 if (destination.IsRegister() || destination.IsFpuRegister()) {
1469 if (unspecified_type) {
1470 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1471 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001472 (src_cst != nullptr && (src_cst->IsIntConstant()
1473 || src_cst->IsFloatConstant()
1474 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001475 // For stack slots and 32bit constants, a 64bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001476 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001477 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001478 // If the source is a double stack slot or a 64bit constant, a 64bit
1479 // type is appropriate. Else the source is a register, and since the
1480 // type has not been specified, we chose a 64bit type to force a 64bit
1481 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001482 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001483 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001484 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001485 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1486 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1487 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001488 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1489 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1490 __ Ldr(dst, StackOperandFrom(source));
1491 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001492 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001493 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001494 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001495 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001496 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001497 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001498 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001499 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1500 ? Primitive::kPrimLong
1501 : Primitive::kPrimInt;
1502 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1503 }
1504 } else {
1505 DCHECK(source.IsFpuRegister());
1506 if (destination.IsRegister()) {
1507 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1508 ? Primitive::kPrimDouble
1509 : Primitive::kPrimFloat;
1510 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1511 } else {
1512 DCHECK(destination.IsFpuRegister());
1513 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001514 }
1515 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001516 } else { // The destination is not a register. It must be a stack slot.
1517 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1518 if (source.IsRegister() || source.IsFpuRegister()) {
1519 if (unspecified_type) {
1520 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001521 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001522 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001523 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001524 }
1525 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001526 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1527 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1528 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001529 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001530 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1531 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001532 UseScratchRegisterScope temps(GetVIXLAssembler());
1533 HConstant* src_cst = source.GetConstant();
1534 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001535 if (src_cst->IsZeroBitPattern()) {
1536 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant()) ? xzr : wzr;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001537 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001538 if (src_cst->IsIntConstant()) {
1539 temp = temps.AcquireW();
1540 } else if (src_cst->IsLongConstant()) {
1541 temp = temps.AcquireX();
1542 } else if (src_cst->IsFloatConstant()) {
1543 temp = temps.AcquireS();
1544 } else {
1545 DCHECK(src_cst->IsDoubleConstant());
1546 temp = temps.AcquireD();
1547 }
1548 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001549 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001550 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001551 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001552 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001553 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001554 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001555 // Use any scratch register (a core or a floating-point one)
1556 // from VIXL scratch register pools as a temporary.
1557 //
1558 // We used to only use the FP scratch register pool, but in some
1559 // rare cases the only register from this pool (D31) would
1560 // already be used (e.g. within a ParallelMove instruction, when
1561 // a move is blocked by another move requiring a scratch FP
1562 // register, which would reserve D31). To prevent this issue, we
1563 // ask for a scratch register of any type (core or FP).
1564 CPURegister temp =
1565 temps.AcquireCPURegisterOfSize(destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001566 __ Ldr(temp, StackOperandFrom(source));
1567 __ Str(temp, StackOperandFrom(destination));
1568 }
1569 }
1570}
1571
1572void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001573 CPURegister dst,
1574 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001575 switch (type) {
1576 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001577 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001578 break;
1579 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001580 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001581 break;
1582 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001583 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001584 break;
1585 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001586 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001587 break;
1588 case Primitive::kPrimInt:
1589 case Primitive::kPrimNot:
1590 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001591 case Primitive::kPrimFloat:
1592 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001593 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001594 __ Ldr(dst, src);
1595 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001596 case Primitive::kPrimVoid:
1597 LOG(FATAL) << "Unreachable type " << type;
1598 }
1599}
1600
Calin Juravle77520bc2015-01-12 18:45:46 +00001601void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001602 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001603 const MemOperand& src,
1604 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001605 MacroAssembler* masm = GetVIXLAssembler();
1606 BlockPoolsScope block_pools(masm);
1607 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001608 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001609 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001610
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001611 DCHECK(!src.IsPreIndex());
1612 DCHECK(!src.IsPostIndex());
1613
1614 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001615 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001616 MemOperand base = MemOperand(temp_base);
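  // Note (an A64 ISA detail, not from the original comments): Ldar and
  // its byte/halfword variants only accept a plain base-register
  // address with no offset, which is why the effective address was
  // materialized into `temp_base` above.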
1617 switch (type) {
1618 case Primitive::kPrimBoolean:
1619 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001620 if (needs_null_check) {
1621 MaybeRecordImplicitNullCheck(instruction);
1622 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001623 break;
1624 case Primitive::kPrimByte:
1625 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001626 if (needs_null_check) {
1627 MaybeRecordImplicitNullCheck(instruction);
1628 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001629 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1630 break;
1631 case Primitive::kPrimChar:
1632 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001633 if (needs_null_check) {
1634 MaybeRecordImplicitNullCheck(instruction);
1635 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001636 break;
1637 case Primitive::kPrimShort:
1638 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001639 if (needs_null_check) {
1640 MaybeRecordImplicitNullCheck(instruction);
1641 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001642 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1643 break;
1644 case Primitive::kPrimInt:
1645 case Primitive::kPrimNot:
1646 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001647 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001648 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001649 if (needs_null_check) {
1650 MaybeRecordImplicitNullCheck(instruction);
1651 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001652 break;
1653 case Primitive::kPrimFloat:
1654 case Primitive::kPrimDouble: {
1655 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001656 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001657
1658 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1659 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001660 if (needs_null_check) {
1661 MaybeRecordImplicitNullCheck(instruction);
1662 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001663 __ Fmov(FPRegister(dst), temp);
1664 break;
1665 }
1666 case Primitive::kPrimVoid:
1667 LOG(FATAL) << "Unreachable type " << type;
1668 }
1669}
1670
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001671void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001672 CPURegister src,
1673 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001674 switch (type) {
1675 case Primitive::kPrimBoolean:
1676 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001677 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001678 break;
1679 case Primitive::kPrimChar:
1680 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001681 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001682 break;
1683 case Primitive::kPrimInt:
1684 case Primitive::kPrimNot:
1685 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001686 case Primitive::kPrimFloat:
1687 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001688 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001689 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001690 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001691 case Primitive::kPrimVoid:
1692 LOG(FATAL) << "Unreachable type " << type;
1693 }
1694}
1695
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001696void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1697 CPURegister src,
1698 const MemOperand& dst) {
1699 UseScratchRegisterScope temps(GetVIXLAssembler());
1700 Register temp_base = temps.AcquireX();
1701
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001702 DCHECK(!dst.IsPreIndex());
1703 DCHECK(!dst.IsPostIndex());
1704
1705 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001706 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001707 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001708 MemOperand base = MemOperand(temp_base);
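  // As with LoadAcquire above, Stlr and its variants only take a plain
  // base register, hence the explicit address computation into
  // `temp_base`.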
1709 switch (type) {
1710 case Primitive::kPrimBoolean:
1711 case Primitive::kPrimByte:
1712 __ Stlrb(Register(src), base);
1713 break;
1714 case Primitive::kPrimChar:
1715 case Primitive::kPrimShort:
1716 __ Stlrh(Register(src), base);
1717 break;
1718 case Primitive::kPrimInt:
1719 case Primitive::kPrimNot:
1720 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001721 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001722 __ Stlr(Register(src), base);
1723 break;
1724 case Primitive::kPrimFloat:
1725 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00001726 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001727 Register temp_src;
1728 if (src.IsZero()) {
1729 // The zero register is used to avoid synthesizing zero constants.
1730 temp_src = Register(src);
1731 } else {
1732 DCHECK(src.IsFPRegister());
1733 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1734 __ Fmov(temp_src, FPRegister(src));
1735 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001736
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001737 __ Stlr(temp_src, base);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001738 break;
1739 }
1740 case Primitive::kPrimVoid:
1741 LOG(FATAL) << "Unreachable type " << type;
1742 }
1743}
1744
Calin Juravle175dc732015-08-25 15:42:32 +01001745void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1746 HInstruction* instruction,
1747 uint32_t dex_pc,
1748 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001749 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001750 GenerateInvokeRuntime(GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value());
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001751 if (EntrypointRequiresStackMap(entrypoint)) {
1752 RecordPcInfo(instruction, dex_pc, slow_path);
1753 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001754}
1755
Roland Levillaindec8f632016-07-22 17:10:06 +01001756void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1757 HInstruction* instruction,
1758 SlowPathCode* slow_path) {
1759 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001760 GenerateInvokeRuntime(entry_point_offset);
1761}
1762
1763void CodeGeneratorARM64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001764 BlockPoolsScope block_pools(GetVIXLAssembler());
1765 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1766 __ Blr(lr);
1767}
1768
Alexandre Rames67555f72014-11-18 10:55:16 +00001769void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001770 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001771 UseScratchRegisterScope temps(GetVIXLAssembler());
1772 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001773 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1774
Serban Constantinescu02164b32014-11-13 14:05:07 +00001775 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001776 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1777 __ Add(temp, class_reg, status_offset);
1778 __ Ldar(temp, HeapOperand(temp));
1779 __ Cmp(temp, mirror::Class::kStatusInitialized);
1780 __ B(lt, slow_path->GetEntryLabel());
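  // The signed `lt` branch above sends every status strictly below
  // kStatusInitialized (including the negative error states) to the
  // slow path; the acquire load (Ldar) pairs with the release store
  // publishing the initialized status, so statics initialized by
  // another thread are visible here (a note on intent, not from the
  // original comments).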
Alexandre Rames67555f72014-11-18 10:55:16 +00001781 __ Bind(slow_path->GetExitLabel());
1782}
Alexandre Rames5319def2014-10-23 10:03:10 +01001783
Roland Levillain44015862016-01-22 11:47:17 +00001784void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001785 BarrierType type = BarrierAll;
1786
1787 switch (kind) {
1788 case MemBarrierKind::kAnyAny:
1789 case MemBarrierKind::kAnyStore: {
1790 type = BarrierAll;
1791 break;
1792 }
1793 case MemBarrierKind::kLoadAny: {
1794 type = BarrierReads;
1795 break;
1796 }
1797 case MemBarrierKind::kStoreStore: {
1798 type = BarrierWrites;
1799 break;
1800 }
1801 default:
1802 LOG(FATAL) << "Unexpected memory barrier " << kind;
1803 }
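  // Mapping sketch (VIXL/A64 encodings): BarrierReads emits
  // `dmb ishld`, BarrierWrites emits `dmb ishst`, and BarrierAll the
  // full `dmb ish`.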
1804 __ Dmb(InnerShareable, type);
1805}
1806
Serban Constantinescu02164b32014-11-13 14:05:07 +00001807void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1808 HBasicBlock* successor) {
1809 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001810 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1811 if (slow_path == nullptr) {
1812 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1813 instruction->SetSlowPath(slow_path);
1814 codegen_->AddSlowPath(slow_path);
1815 if (successor != nullptr) {
1816 DCHECK(successor->IsLoopHeader());
1817 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1818 }
1819 } else {
1820 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1821 }
1822
Serban Constantinescu02164b32014-11-13 14:05:07 +00001823 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1824 Register temp = temps.AcquireW();
1825
Andreas Gampe542451c2016-07-26 09:02:02 -07001826 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
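  // `temp` now holds the 16-bit thread flags; any non-zero value (e.g.
  // a pending suspend or checkpoint request) routes execution to the
  // slow path below (a summary of the protocol, not an exhaustive list
  // of flag bits).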
Serban Constantinescu02164b32014-11-13 14:05:07 +00001827 if (successor == nullptr) {
1828 __ Cbnz(temp, slow_path->GetEntryLabel());
1829 __ Bind(slow_path->GetReturnLabel());
1830 } else {
1831 __ Cbz(temp, codegen_->GetLabelOf(successor));
1832 __ B(slow_path->GetEntryLabel());
1833 // slow_path will return to GetLabelOf(successor).
1834 }
1835}
1836
Alexandre Rames5319def2014-10-23 10:03:10 +01001837InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1838 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001839 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001840 assembler_(codegen->GetAssembler()),
1841 codegen_(codegen) {}
1842
1843#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001844 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001845
1846#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1847
1848enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001849 // Using a base helps identify when we hit such breakpoints.
1850 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001851#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1852 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1853#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1854};
1855
1856#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001857 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001858 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1859 } \
1860 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1861 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1862 locations->SetOut(Location::Any()); \
1863 }
1864 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1865#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1866
1867#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001868#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001869
Alexandre Rames67555f72014-11-18 10:55:16 +00001870void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001871 DCHECK_EQ(instr->InputCount(), 2U);
1872 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1873 Primitive::Type type = instr->GetResultType();
1874 switch (type) {
1875 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001876 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001877 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001878 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001879 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001880 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001881
1882 case Primitive::kPrimFloat:
1883 case Primitive::kPrimDouble:
1884 locations->SetInAt(0, Location::RequiresFpuRegister());
1885 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001886 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001887 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001888
Alexandre Rames5319def2014-10-23 10:03:10 +01001889 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001890 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001891 }
1892}
1893
Alexandre Rames09a99962015-04-15 11:47:56 +01001894void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001895 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1896
1897 bool object_field_get_with_read_barrier =
1898 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001899 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001900 new (GetGraph()->GetArena()) LocationSummary(instruction,
1901 object_field_get_with_read_barrier ?
1902 LocationSummary::kCallOnSlowPath :
1903 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01001904 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001905 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01001906 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001907 locations->SetInAt(0, Location::RequiresRegister());
1908 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1909 locations->SetOut(Location::RequiresFpuRegister());
1910 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001911 // The output overlaps for an object field get when read barriers
1912 // are enabled: we do not want the load to overwrite the object's
1913 // location, as we need it to emit the read barrier.
1914 locations->SetOut(
1915 Location::RequiresRegister(),
1916 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001917 }
1918}
1919
1920void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1921 const FieldInfo& field_info) {
1922 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001923 LocationSummary* locations = instruction->GetLocations();
1924 Location base_loc = locations->InAt(0);
1925 Location out = locations->Out();
1926 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001927 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001928 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001929 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001930
Roland Levillain44015862016-01-22 11:47:17 +00001931 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1932 // Object FieldGet with Baker's read barrier case.
1933 MacroAssembler* masm = GetVIXLAssembler();
1934 UseScratchRegisterScope temps(masm);
1935 // /* HeapReference<Object> */ out = *(base + offset)
1936 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1937 Register temp = temps.AcquireW();
1938 // Note that potential implicit null checks are handled in this
1939 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1940 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1941 instruction,
1942 out,
1943 base,
1944 offset,
1945 temp,
1946 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001947 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001948 } else {
1949 // General case.
1950 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001951 // Note that a potential implicit null check is handled in this
1952 // CodeGeneratorARM64::LoadAcquire call.
1953 // NB: LoadAcquire will record the pc info if needed.
1954 codegen_->LoadAcquire(
1955 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001956 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001957 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001958 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001959 }
Roland Levillain44015862016-01-22 11:47:17 +00001960 if (field_type == Primitive::kPrimNot) {
1961 // If read barriers are enabled, emit read barriers other than
1962 // Baker's using a slow path (and also unpoison the loaded
1963 // reference, if heap poisoning is enabled).
1964 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1965 }
Roland Levillain4d027112015-07-01 15:41:14 +01001966 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001967}
1968
1969void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1970 LocationSummary* locations =
1971 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1972 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001973 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
1974 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
1975 } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001976 locations->SetInAt(1, Location::RequiresFpuRegister());
1977 } else {
1978 locations->SetInAt(1, Location::RequiresRegister());
1979 }
1980}
1981
1982void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001983 const FieldInfo& field_info,
1984 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001985 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001986 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001987
1988 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001989 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001990 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001991 Offset offset = field_info.GetFieldOffset();
1992 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001993
Roland Levillain4d027112015-07-01 15:41:14 +01001994 {
1995 // We use a block to end the scratch scope before the write barrier, thus
1996 // freeing the temporary registers so they can be used in `MarkGCCard`.
1997 UseScratchRegisterScope temps(GetVIXLAssembler());
1998
1999 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
2000 DCHECK(value.IsW());
2001 Register temp = temps.AcquireW();
2002 __ Mov(temp, value.W());
2003 GetAssembler()->PoisonHeapReference(temp.W());
2004 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01002005 }
Roland Levillain4d027112015-07-01 15:41:14 +01002006
2007 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002008 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
2009 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01002010 } else {
2011 codegen_->Store(field_type, source, HeapOperand(obj, offset));
2012 codegen_->MaybeRecordImplicitNullCheck(instruction);
2013 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002014 }
2015
2016 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002017 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01002018 }
2019}
2020
Alexandre Rames67555f72014-11-18 10:55:16 +00002021void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002022 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002023
2024 switch (type) {
2025 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002026 case Primitive::kPrimLong: {
2027 Register dst = OutputRegister(instr);
2028 Register lhs = InputRegisterAt(instr, 0);
2029 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01002030 if (instr->IsAdd()) {
2031 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002032 } else if (instr->IsAnd()) {
2033 __ And(dst, lhs, rhs);
2034 } else if (instr->IsOr()) {
2035 __ Orr(dst, lhs, rhs);
2036 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002037 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002038 } else if (instr->IsRor()) {
2039 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002040 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002041 __ Ror(dst, lhs, shift);
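        // For example (illustrative): rotating a 32-bit value by an
        // immediate of 33 reduces to 33 & 31 = 1, matching the
        // semantics of Integer.rotateRight.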
2042 } else {
2043 // Ensure the shift distance is in a register of the same size as the result. If
2044 // we are rotating a long and the shift originally arrives in a W register,
2045 // we do not need to sign-extend it (sxtw) for use as an X register, since
2046 // the shift distances are always masked with (reg_bits - 1).
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

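// Constant shift distances are masked to the type width, as the JLS requires.
// As an assumed example, the Java expression `x << 35` on an int masks the
// distance to 35 & 31 == 3 and would emit a single
//   lsl w0, w1, #3
// while a variable distance uses the register form of the instruction.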
void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.GetImmediate() &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

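// HBitwiseNegatedRight fuses a bitwise operation with a negated right
// operand into one instruction. Sketch of the mapping (registers assumed):
//   a & ~b  ->  bic w0, w1, w2
//   a | ~b  ->  orn w0, w1, w2
//   a ^ ~b  ->  eon w0, w1, w2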
void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}

void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == Primitive::kPrimInt ||
         instruction->GetType() == Primitive::kPrimLong);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

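// This handler covers IR nodes where a shift, rotate or extension has been
// merged into the second operand of a data-processing instruction. As an
// assumed example, `a + (b << 5)` on longs would emit a single
//   add x0, x1, x2, lsl #5
// instead of a separate shift followed by an add.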
void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  Primitive::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HArm64DataProcWithShifterOp` was created by merging a type
  // conversion into the shifter operand, the IR producing `right_reg` (the
  // input to that type conversion) can have a type different from the
  // current instruction's type, so we indicate the type manually.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  int64_t shift_amount = instruction->GetShiftAmount() &
      (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

  Operand right_operand(0);

  HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still cope if one were passed, by
  // generating the extension as a separate instruction.
  // `HNeg` does not support extension either. See the comments in
  // `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister());
}

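// HIntermediateAddress materializes `array_base + data_offset` once so that
// several accesses to the same array can share it. The Add below may take an
// encodable immediate or a register for the offset operand.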
void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate a register for the Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

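// Fused multiply-accumulate mapping, sketched with assumed registers:
//   acc + a * b  ->  madd x0, x1, x2, x3
//   acc - a * b  ->  msub x0, x1, x2, x3
//   0 - a * b    ->  mneg x0, x1, x2
// On cores affected by Cortex-A53 erratum 835769, a 64-bit multiply-accumulate
// immediately following a load or store may produce a wrong result; the nop
// emitted below breaks up exactly that sequence.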
void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == Primitive::kPrimLong &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::aarch64::Instruction* prev =
        masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      vixl::CodeBufferCheckScope scope(masm,
                                       kInstructionSize,
                                       vixl::CodeBufferCheckScope::kReserveBufferSpace,
                                       vixl::CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

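// The code below handles, in addition to the plain indexed load: Baker read
// barriers for object arrays, addresses pre-computed by HIntermediateAddress,
// and String.charAt() under string compression, where bit 0 of the count
// field selects between a byte load (ldrb, compressed) and a half-word load
// (ldrh, uncompressed).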
void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
  BlockPoolsScope block_pools(masm);

  // The read barrier instrumentation of object ArrayGet instructions
  // does not support the HIntermediateAddress instruction.
  DCHECK(!((type == Primitive::kPrimNot) &&
           instruction->GetArray()->IsIntermediateAddress() &&
           kEmitCompilerReadBarrier));

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    Register temp = temps.AcquireW();
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    codegen_->GenerateArrayLoadWithBakerReadBarrier(
        instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    Register length;
    if (maybe_compressed_char_at) {
      uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
      length = temps.AcquireW();
      if (instruction->GetArray()->IsIntermediateAddress()) {
        DCHECK_LT(count_offset, offset);
        int64_t adjusted_offset = static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
        // Note that `adjusted_offset` is negative, so this will be a LDUR.
        __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
      } else {
        __ Ldr(length, HeapOperand(obj, count_offset));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (index.IsConstant()) {
      if (maybe_compressed_char_at) {
        vixl::aarch64::Label uncompressed_load, done;
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ Tbnz(length.W(), 0, &uncompressed_load);
        __ Ldrb(Register(OutputCPURegister(instruction)),
                HeapOperand(obj, offset + Int64ConstantFrom(index)));
        __ B(&done);
        __ Bind(&uncompressed_load);
        __ Ldrh(Register(OutputCPURegister(instruction)),
                HeapOperand(obj, offset + (Int64ConstantFrom(index) << 1)));
        __ Bind(&done);
      } else {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
        source = HeapOperand(obj, offset);
      }
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      if (maybe_compressed_char_at) {
        vixl::aarch64::Label uncompressed_load, done;
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ Tbnz(length.W(), 0, &uncompressed_load);
        __ Ldrb(Register(OutputCPURegister(instruction)),
                HeapOperand(temp, XRegisterFrom(index), LSL, 0));
        __ B(&done);
        __ Bind(&uncompressed_load);
        __ Ldrh(Register(OutputCPURegister(instruction)),
                HeapOperand(temp, XRegisterFrom(index), LSL, 1));
        __ Bind(&done);
      } else {
        source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
      }
    }
    if (!maybe_compressed_char_at) {
      codegen_->Load(type, OutputCPURegister(instruction), source);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }

    if (type == Primitive::kPrimNot) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  vixl::aarch64::Register out = OutputRegister(instruction);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out the compression flag from the String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Lsr(out.W(), out.W(), 1u);
  }
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
  } else if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

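// ArraySet may need a runtime type check. The fast path below compares the
// value's class against the array's component type inline and only branches
// to ArraySetSlowPathARM64 (which calls into the runtime) on a mismatch; a
// null value is stored directly, since null needs no type check.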
void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
    codegen_->Store(value_type, value, destination);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  } else {
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    vixl::aarch64::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::aarch64::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          __ Str(wzr, destination);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(&done);
          __ Bind(&non_zero);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers. This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        Register temp2 = temps.AcquireSameSizeAs(array);
        // /* HeapReference<Class> */ temp = array->klass_
        __ Ldr(temp, HeapOperand(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        GetAssembler()->MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ Ldr(temp, HeapOperand(temp, component_offset));
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ Ldr(temp2, HeapOperand(Register(value), class_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor `temp2`, as we are comparing two poisoned references.
        __ Cmp(temp, temp2);
        temps.Release(temp2);

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          vixl::aarch64::Label do_put;
          __ B(eq, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->super_class_
          __ Ldr(temp, HeapOperand(temp, super_offset));
          // If heap poisoning is enabled, no need to unpoison
          // `temp`, as we are comparing against null below.
          __ Cbnz(temp, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ B(ne, slow_path->GetEntryLabel());
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      __ Str(source, destination);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConvention calling_convention;
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
}

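// The index is compared unsigned: with `hs` a negative index wraps to a large
// unsigned value, so a single compare-and-branch rejects both `index < 0` and
// `index >= length`. Sketch with assumed registers:
//   cmp  w0, w1
//   b.hs <BoundsCheckSlowPath>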
void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equals, Float.compare,
    // Float.compareTo, Double.equals, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

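// Integer compare is lowered branch-free. Sketch with assumed registers:
//   cmp  w1, w2
//   cset w0, ne            // w0 = (lhs != rhs) ? 1 : 0
//   cneg w0, w0, lt        // w0 = (lhs < rhs) ? -1 : w0
// which yields exactly the -1/0/+1 contract described below.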
void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                  \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }        \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

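// Division or remainder by +/-1 needs no divide instruction. Sketch with
// assumed registers: `x % 1` and `x % -1` become `mov w0, #0`; `x / 1` is a
// plain register move and `x / -1` a single `neg w0, w1`.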
void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}

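// Signed division by a power of two rounds toward zero, so a negative
// dividend is biased by (abs_imm - 1) before shifting. Sketch of the emitted
// sequence for `int x / 8` (w16 is an assumed scratch register):
//   add  w16, w0, #7        // w0 + (abs_imm - 1)
//   cmp  w0, #0
//   csel w0, w16, w0, lt    // use the biased value only when x < 0
//   asr  w0, w0, #3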
void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}

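// For other constant divisors this uses the classic "magic number" scheme
// (in the style of Hacker's Delight): the quotient is the high half of
// dividend * magic, optionally corrected by adding or subtracting the
// dividend, arithmetic-shifted, then adjusted by the sign bit. For a
// remainder, the quotient is folded back with msub: out = dividend - q * imm.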
void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

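// GenerateTestAndBranch handles both materialized conditions (a boolean
// already in a register, tested with cbz/cbnz) and conditions fused into the
// branch (cmp followed by b.<cond>). Either target may be null, meaning
// "fall through"; the branch polarity is inverted as needed so that only one
// branch is emitted.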
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003117void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003118 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003119 vixl::aarch64::Label* true_target,
3120 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003121 // FP branching requires both targets to be explicit. If either of the targets
3122 // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003123 vixl::aarch64::Label fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003124 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003125
David Brazdil0debae72015-11-12 18:37:00 +00003126 if (true_target == nullptr && false_target == nullptr) {
3127 // Nothing to do. The code always falls through.
3128 return;
3129 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003130 // Constant condition, statically compared against "true" (integer value 1).
3131 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003132 if (true_target != nullptr) {
3133 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003134 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003135 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003136 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003137 if (false_target != nullptr) {
3138 __ B(false_target);
3139 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003140 }
David Brazdil0debae72015-11-12 18:37:00 +00003141 return;
3142 }
3143
3144 // The following code generates these patterns:
3145 // (1) true_target == nullptr && false_target != nullptr
3146 // - opposite condition true => branch to false_target
3147 // (2) true_target != nullptr && false_target == nullptr
3148 // - condition true => branch to true_target
3149 // (3) true_target != nullptr && false_target != nullptr
3150 // - condition true => branch to true_target
3151 // - branch to false_target
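  // Illustrative sketch only (registers and labels are placeholders): with a
  // materialized condition in w0, the three patterns roughly become:
  //   (1) cbz  w0, false_target
  //   (2) cbnz w0, true_target
  //   (3) cbnz w0, true_target
  //       b    false_target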
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    Primitive::Type type = condition->InputAt(0)->GetType();
    if (Primitive::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::aarch64::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

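      // Fast path: a comparison against zero needs no flag-setting `cmp`; eq/ne
      // map to cbz/cbnz, and lt/ge reduce to a sign-bit test, e.g. for a W register:
      //   lt: tbnz w0, #31, target   // branch if the sign bit is set (lhs < 0)
      //   ge: tbz  w0, #31, target   // branch if the sign bit is clear (lhs >= 0)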
      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
    true_target = nullptr;
  }
  vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
    false_target = nullptr;
  }
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ Ldr(OutputRegister(flag),
         MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline Condition GetConditionForSelect(HCondition* condition) {
  IfCondition cond = condition->AsCondition()->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
    HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
    bool is_true_value_constant = cst_true_value != nullptr;
    bool is_false_value_constant = cst_false_value != nullptr;
    // Ask VIXL whether we should synthesize constants in registers.
    // We give an arbitrary register to VIXL when dealing with non-constant inputs.
    Operand true_op = is_true_value_constant ?
        Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
    Operand false_op = is_false_value_constant ?
        Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
    bool true_value_in_register = false;
    bool false_value_in_register = false;
    MacroAssembler::GetCselSynthesisInformation(
        x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
    true_value_in_register |= !is_true_value_constant;
    false_value_in_register |= !is_false_value_constant;

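    // For example (a sketch of VIXL's answer, not a guarantee): constants such as
    // 0, 1 or -1 can be synthesized against the zero register with csel/csinc/csinv,
    // so they may stay as constant locations, while an arbitrary constant is
    // materialized into a register.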
    locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
                                                 : Location::ConstantLocation(cst_true_value));
    locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
                                                  : Location::ConstantLocation(cst_false_value));
    locations->SetOut(Location::RequiresRegister());
  }

  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Use the condition flags set by the previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition());
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition());
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition());
  }

  if (Primitive::IsFloatingPointType(select->GetType())) {
    __ Fcsel(OutputFPRegister(select),
             InputFPRegisterAt(select, 1),
             InputFPRegisterAt(select, 0),
             csel_cond);
  } else {
    __ Csel(OutputRegister(select),
            InputOperandAt(select, 1),
            InputOperandAt(select, 0),
            csel_cond);
  }
}

void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

void CodeGeneratorARM64::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

// Temp is used for read barrier.
static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
  if (kEmitCompilerReadBarrier &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
    return 1;
  }
  return 0;
}

// The interface case has 3 temps: one to hold the iftable length, one for the
// current iftable position, and one for loading the current interface class.
// The other checks have one temp for loading the object's class.
static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
  if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
    return 3;
  }
  return 1 + NumberOfInstanceOfTemps(type_check_kind);
}

void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARM64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  // Add temps if necessary for read barriers.
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}

void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Location out_loc = locations->Out();
  Register out = OutputRegister(instruction);
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  vixl::aarch64::Label done, zero;
  SlowPathCodeARM64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &zero);
  }

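  // Common shape of the code below (illustrative only): load the object's class
  // into `out`, compare or walk the type hierarchy per check kind, then
  // materialize the boolean result. For the exact check, roughly:
  //   ldr  w_out, [x_obj, #class_offset]   // out = obj->klass_ (+ read barrier)
  //   cmp  w_out, w_cls
  //   cset w_out, eq                       // out = (out == cls) ? 1 : 0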
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      __ Cmp(out, cls);
      __ Cset(out, eq);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Cmp(out, cls);
      __ B(ne, &loop);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      __ Cmp(out, cls);
      __ B(eq, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Cbnz(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      vixl::aarch64::Label exact_check;
      __ Cmp(out, cls);
      __ B(eq, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Ldrh(out, HeapOperand(out, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(out, &zero);
      __ Bind(&exact_check);
      __ Mov(out, 1);
      __ B(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      __ Cmp(out, cls);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(ne, slow_path->GetEntryLabel());
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require assigning fixed registers to the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ Mov(out, 0);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  bool throws_into_catch = instruction->CanThrowIntoCatchBlock();

  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall;  // In fact, call on a fatal (non-returning) slow path.
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
  locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
}

void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_GE(num_temps, 1u);
  DCHECK_LE(num_temps, 3u);
  Location temp_loc = locations->GetTemp(0);
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
  Register temp = WRegisterFrom(temp_loc);
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  bool is_type_check_slow_path_fatal = false;
  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  if (!kEmitCompilerReadBarrier) {
    is_type_check_slow_path_fatal =
        (type_check_kind == TypeCheckKind::kExactCheck ||
         type_check_kind == TypeCheckKind::kAbstractClassCheck ||
         type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
         type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
        !instruction->CanThrowIntoCatchBlock();
  }
  SlowPathCodeARM64* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                          is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl::aarch64::Label done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Cbz(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, compare classes.
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back to the beginning of the loop.
      __ Cbnz(temp, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Do an exact check.
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is null, jump to the slow path to throw the exception.
      __ Cbz(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array. Further check that this component type is not a
      // primitive type.
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require
      // assigning fixed registers to the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    case TypeCheckKind::kInterfaceCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
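      // Layout note: an IfTable stores two heap references per interface (the
      // interface class followed by its method array), and its length counts
      // references, which is why the loop below advances `temp` by
      // 2 * kHeapReferenceSize and decrements the count in maybe_temp2 by 2.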
      // Iftable is never null.
      __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
      // Loop through the iftable and check if any class matches.
      vixl::aarch64::Label start_loop;
      __ Bind(&start_loop);
      __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
      __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
      // Go to next interface.
      __ Add(temp, temp, 2 * kHeapReferenceSize);
      __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
      // Compare the classes and continue the loop if they do not match.
      __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
      __ B(ne, &start_loop);
      break;
    }
  }
  __ Bind(&done);

  __ Bind(type_check_slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as a dex call, except that
  // instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = XRegisterFrom(locations->GetTemp(0));
  Location receiver = locations->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  BlockPoolsScope block_pools(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

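  // ip1 is the hidden argument: when several interface methods hash to the same
  // IMT slot, the slot holds art_quick_imt_conflict_trampoline, which uses ip1
  // (here the dex method index) to locate the interface method actually invoked.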
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  __ Ldr(temp,
      MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kArm64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARM64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On ARM64 we support all dispatch types.
  return desired_dispatch_info;
}

void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      uint32_t offset =
          GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      const DexFile& dex_file = invoke->GetDexFile();
      uint32_t element_offset = invoke->GetDexCacheArrayOffset();
      vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add LDR with its PC-relative DexCache access patch.
      vixl::aarch64::Label* ldr_label =
          NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
      EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
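      // Once the linker patches these placeholders, the pair resolves the
      // ArtMethod* slot in two instructions, roughly:
      //   adrp xN, <page of the dex cache array element>
      //   ldr  xN, [xN, #<offset of the element within its 4KiB page>]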
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register reg = XRegisterFrom(temp);
      Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = XRegisterFrom(current_method);
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
      }

      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ Ldr(reg.X(),
             MemOperand(method_reg.X(),
                        ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ Bl(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
      // lr()
      __ Blr(lr);
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

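  // Dispatch sketch (register names illustrative): with the receiver in w0,
  //   ldr w16, [x0, #class_offset]          // temp = receiver->klass_
  //   ldr x16, [x16, #method_offset]        // temp = klass->vtable_[index]
  //   ldr lr,  [x16, #entry_point_offset]   // resolved method's entry point
  //   blr lr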
4081 BlockPoolsScope block_pools(GetVIXLAssembler());
4082
4083 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004084 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004085 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004086 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004087 // Instead of simply (possibly) unpoisoning `temp` here, we should
4088 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004089 // However this is not required in practice, as this is an
4090 // intermediate/temporary reference and because the current
4090 // concurrent copying collector keeps the from-space memory
4091 // intact/accessible until the end of the marking phase (the
4092 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004093 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4094 // temp = temp->GetMethodAt(method_offset);
4095 __ Ldr(temp, MemOperand(temp, method_offset));
4096 // lr = temp->GetEntryPoint();
4097 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
4098 // lr();
4099 __ Blr(lr);
4100}
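
// The virtual dispatch above, sketched as the assumed instruction stream
// (xT is the temp, xR the first managed argument register holding the
// receiver; read barrier instrumentation omitted):
//
//   ldr wT, [xR, #class_offset]         // temp = receiver->klass_
//   ldr xT, [xT, #vtable_entry_offset]  // temp = temp->GetMethodAt(index)
//   ldr lr, [xT, #entry_point_offset]   // lr = temp->GetEntryPoint()
//   blr lr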
4101
Orion Hodsonac141392017-01-13 11:53:47 +00004102void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4103 HandleInvoke(invoke);
4104}
4105
4106void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4107 codegen_->GenerateInvokePolymorphicCall(invoke);
4108}
4109
Scott Wakeling97c72b72016-06-24 16:19:36 +01004110vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
4111 const DexFile& dex_file,
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004112 dex::StringIndex string_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004113 vixl::aarch64::Label* adrp_label) {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004114 return
4115 NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &pc_relative_string_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004116}
4117
Scott Wakeling97c72b72016-06-24 16:19:36 +01004118vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
4119 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004120 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004121 vixl::aarch64::Label* adrp_label) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08004122 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &pc_relative_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004123}
4124
Vladimir Marko1998cd02017-01-13 13:02:58 +00004125vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4126 const DexFile& dex_file,
4127 dex::TypeIndex type_index,
4128 vixl::aarch64::Label* adrp_label) {
4129 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
4130}
4131
Scott Wakeling97c72b72016-06-24 16:19:36 +01004132vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
4133 const DexFile& dex_file,
4134 uint32_t element_offset,
4135 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004136 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
4137}
4138
Scott Wakeling97c72b72016-06-24 16:19:36 +01004139vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
4140 const DexFile& dex_file,
4141 uint32_t offset_or_index,
4142 vixl::aarch64::Label* adrp_label,
4143 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004144 // Add a patch entry and return the label.
4145 patches->emplace_back(dex_file, offset_or_index);
4146 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004147 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004148 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4149 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4150 return label;
4151}
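
// Typical usage, as in the HLoadString/HLoadClass code below: the first
// call (adrp_label == nullptr) creates the ADRP patch, and its returned
// label is passed back in so that the companion ADD/LDR patch references
// the same ADRP instruction:
//
//   vixl::aarch64::Label* adrp_label =
//       codegen_->NewPcRelativeStringPatch(dex_file, string_index);
//   codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
//   vixl::aarch64::Label* add_label =
//       codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
//   codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());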
4152
Scott Wakeling97c72b72016-06-24 16:19:36 +01004153vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004154 const DexFile& dex_file, dex::StringIndex string_index) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004155 return boot_image_string_patches_.GetOrCreate(
4156 StringReference(&dex_file, string_index),
4157 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4158}
4159
Scott Wakeling97c72b72016-06-24 16:19:36 +01004160vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Andreas Gampea5b09a62016-11-17 15:21:22 -08004161 const DexFile& dex_file, dex::TypeIndex type_index) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004162 return boot_image_type_patches_.GetOrCreate(
4163 TypeReference(&dex_file, type_index),
4164 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4165}
4166
Scott Wakeling97c72b72016-06-24 16:19:36 +01004167vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4168 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004169 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
4170 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
4171 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
4172}
4173
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004174vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004175 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
4176 jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
4177 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004178 return jit_string_patches_.GetOrCreate(
4179 StringReference(&dex_file, string_index),
4180 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4181}
4182
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004183vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
4184 const DexFile& dex_file, dex::TypeIndex type_index, uint64_t address) {
4185 jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index), address);
4186 return jit_class_patches_.GetOrCreate(
4187 TypeReference(&dex_file, type_index),
4188 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4189}
4190
Vladimir Markoaad75c62016-10-03 08:46:48 +00004191void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4192 vixl::aarch64::Register reg) {
4193 DCHECK(reg.IsX());
4194 SingleEmissionCheckScope guard(GetVIXLAssembler());
4195 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004196 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004197}
4198
4199void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4200 vixl::aarch64::Register out,
4201 vixl::aarch64::Register base) {
4202 DCHECK(out.IsX());
4203 DCHECK(base.IsX());
4204 SingleEmissionCheckScope guard(GetVIXLAssembler());
4205 __ Bind(fixup_label);
4206 __ add(out, base, Operand(/* offset placeholder */ 0));
4207}
4208
4209void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4210 vixl::aarch64::Register out,
4211 vixl::aarch64::Register base) {
4212 DCHECK(base.IsX());
4213 SingleEmissionCheckScope guard(GetVIXLAssembler());
4214 __ Bind(fixup_label);
4215 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4216}
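
// The three placeholder emitters above pair up into the two patchable
// PC-relative sequences used in this file (the zero immediates are
// placeholders the linker rewrites):
//
//   adrp xN, #0          // page address of the target
//   add  xN, xN, #0      // plus the low 12 bits: address materialization
//
//   adrp xN, #0
//   ldr  wM, [xN, #0]    // load through the page, e.g. for .bss entries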
4217
4218template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
4219inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4220 const ArenaDeque<PcRelativePatchInfo>& infos,
4221 ArenaVector<LinkerPatch>* linker_patches) {
4222 for (const PcRelativePatchInfo& info : infos) {
4223 linker_patches->push_back(Factory(info.label.GetLocation(),
4224 &info.target_dex_file,
4225 info.pc_insn_label->GetLocation(),
4226 info.offset_or_index));
4227 }
4228}
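
// This template is instantiated below with the LinkerPatch factory matching
// each patch kind, e.g.:
//
//   EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(
//       pc_relative_string_patches_, linker_patches);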
4229
Vladimir Marko58155012015-08-19 12:49:41 +00004230void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
4231 DCHECK(linker_patches->empty());
4232 size_t size =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004233 pc_relative_dex_cache_patches_.size() +
4234 boot_image_string_patches_.size() +
4235 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004236 boot_image_type_patches_.size() +
4237 pc_relative_type_patches_.size() +
Vladimir Marko1998cd02017-01-13 13:02:58 +00004238 type_bss_entry_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004239 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004240 linker_patches->reserve(size);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004241 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004242 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00004243 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004244 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004245 info.offset_or_index));
4246 }
4247 for (const auto& entry : boot_image_string_patches_) {
4248 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004249 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4250 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004251 target_string.dex_file,
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004252 target_string.string_index.index_));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004253 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004254 if (!GetCompilerOptions().IsBootImage()) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00004255 DCHECK(pc_relative_type_patches_.empty());
Vladimir Markoaad75c62016-10-03 08:46:48 +00004256 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
4257 linker_patches);
4258 } else {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004259 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
4260 linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004261 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
4262 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004263 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004264 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
4265 linker_patches);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004266 for (const auto& entry : boot_image_type_patches_) {
4267 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004268 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4269 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004270 target_type.dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004271 target_type.type_index.index_));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004272 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004273 for (const auto& entry : boot_image_address_patches_) {
4274 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Scott Wakeling97c72b72016-06-24 16:19:36 +01004275 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4276 linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00004277 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004278 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004279}
4280
Scott Wakeling97c72b72016-06-24 16:19:36 +01004281vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004282 Uint32ToLiteralMap* map) {
4283 return map->GetOrCreate(
4284 value,
4285 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4286}
4287
Scott Wakeling97c72b72016-06-24 16:19:36 +01004288vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004289 return uint64_literals_.GetOrCreate(
4290 value,
4291 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004292}
4293
Scott Wakeling97c72b72016-06-24 16:19:36 +01004294vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00004295 MethodReference target_method,
4296 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004297 return map->GetOrCreate(
4298 target_method,
4299 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00004300}
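
// All the deduplication helpers above lean on the same map idiom: look up
// the key and run the literal-creating lambda only on a miss. A minimal
// sketch of the assumed GetOrCreate contract (illustrative, not the actual
// ART container API):
//
//   template <typename Map, typename Factory>
//   typename Map::mapped_type GetOrCreate(Map* map,
//                                         const typename Map::key_type& key,
//                                         Factory factory) {
//     auto it = map->find(key);
//     if (it == map->end()) {
//       it = map->emplace(key, factory()).first;  // create on first use
//     }
//     return it->second;
//   }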
4301
Andreas Gampe878d58c2015-01-15 23:24:00 -08004302void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004303 // Explicit clinit checks triggered by static invokes must have been pruned by
4304 // art::PrepareForRegisterAllocation.
4305 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004306
Andreas Gampe878d58c2015-01-15 23:24:00 -08004307 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4308 return;
4309 }
4310
Alexandre Ramesd921d642015-04-16 15:07:16 +01004311 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004312 LocationSummary* locations = invoke->GetLocations();
4313 codegen_->GenerateStaticOrDirectCall(
4314 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00004315 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01004316}
4317
4318void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004319 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4320 return;
4321 }
4322
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004323 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004324 DCHECK(!codegen_->IsLeafMethod());
4325 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4326}
4327
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004328HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4329 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004330 switch (desired_class_load_kind) {
4331 case HLoadClass::LoadKind::kReferrersClass:
4332 break;
4333 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4334 DCHECK(!GetCompilerOptions().GetCompilePic());
4335 break;
4336 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
4337 DCHECK(GetCompilerOptions().GetCompilePic());
4338 break;
4339 case HLoadClass::LoadKind::kBootImageAddress:
4340 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004341 case HLoadClass::LoadKind::kBssEntry:
4342 DCHECK(!Runtime::Current()->UseJitCompilation());
4343 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004344 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004345 DCHECK(Runtime::Current()->UseJitCompilation());
4346 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004347 case HLoadClass::LoadKind::kDexCacheViaMethod:
4348 break;
4349 }
4350 return desired_class_load_kind;
4351}
4352
Alexandre Rames67555f72014-11-18 10:55:16 +00004353void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004354 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4355 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004356 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00004357 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004358 cls,
4359 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00004360 LocationFrom(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004361 return;
4362 }
Vladimir Marko41559982017-01-06 14:04:23 +00004363 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004364
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004365 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4366 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004367 ? LocationSummary::kCallOnSlowPath
4368 : LocationSummary::kNoCall;
4369 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004370 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004371 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004372 }
4373
Vladimir Marko41559982017-01-06 14:04:23 +00004374 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004375 locations->SetInAt(0, Location::RequiresRegister());
4376 }
4377 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004378}
4379
4380void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004381 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4382 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
4383 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01004384 return;
4385 }
Vladimir Marko41559982017-01-06 14:04:23 +00004386 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01004387
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004388 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004389 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004390
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004391 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
4392 ? kWithoutReadBarrier
4393 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004394 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00004395 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004396 case HLoadClass::LoadKind::kReferrersClass: {
4397 DCHECK(!cls->CanCallRuntime());
4398 DCHECK(!cls->MustGenerateClinitCheck());
4399 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4400 Register current_method = InputRegisterAt(cls, 0);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004401 GenerateGcRootFieldLoad(cls,
4402 out_loc,
4403 current_method,
4404 ArtMethod::DeclaringClassOffset().Int32Value(),
Roland Levillain00468f32016-10-27 18:02:48 +01004405 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004406 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004407 break;
4408 }
4409 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004410 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004411 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4412 cls->GetTypeIndex()));
4413 break;
4414 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004415 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004416 // Add ADRP with its PC-relative type patch.
4417 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08004418 dex::TypeIndex type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004419 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004420 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004421 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004422 vixl::aarch64::Label* add_label =
4423 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004424 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004425 break;
4426 }
4427 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004428 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004429 DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
4430 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
4431 break;
4432 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004433 case HLoadClass::LoadKind::kBssEntry: {
4434 // Add ADRP with its PC-relative Class .bss entry patch.
4435 const DexFile& dex_file = cls->GetDexFile();
4436 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Marko1998cd02017-01-13 13:02:58 +00004437 vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004438 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
4439 // Add LDR with its PC-relative Class patch.
4440 vixl::aarch64::Label* ldr_label =
Vladimir Marko1998cd02017-01-13 13:02:58 +00004441 codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004442 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4443 GenerateGcRootFieldLoad(cls,
4444 cls->GetLocations()->Out(),
4445 out.X(),
4446 /* placeholder */ 0u,
4447 ldr_label,
4448 kCompilerReadBarrierOption);
4449 generate_null_check = true;
4450 break;
4451 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004452 case HLoadClass::LoadKind::kJitTableAddress: {
4453 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
4454 cls->GetTypeIndex(),
4455 cls->GetAddress()));
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004456 GenerateGcRootFieldLoad(cls,
4457 out_loc,
4458 out.X(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004459 /* offset */ 0,
Roland Levillain00468f32016-10-27 18:02:48 +01004460 /* fixup_label */ nullptr,
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004461 kCompilerReadBarrierOption);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004462 break;
4463 }
Vladimir Marko41559982017-01-06 14:04:23 +00004464 case HLoadClass::LoadKind::kDexCacheViaMethod:
4465 LOG(FATAL) << "UNREACHABLE";
4466 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004467 }
4468
4469 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4470 DCHECK(cls->CanCallRuntime());
4471 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
4472 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4473 codegen_->AddSlowPath(slow_path);
4474 if (generate_null_check) {
4475 __ Cbz(out, slow_path->GetEntryLabel());
4476 }
4477 if (cls->MustGenerateClinitCheck()) {
4478 GenerateClassInitializationCheck(slow_path, out);
4479 } else {
4480 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004481 }
4482 }
4483}
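
// For the kBssEntry case above, ignoring read barrier instrumentation, the
// assembled sequence looks roughly like this (registers assumed; the
// linker patches the placeholders to address the Class .bss entry):
//
//   adrp x0, #0          // page of the .bss entry
//   ldr  w0, [x0, #0]    // GcRoot<mirror::Class> root = *entry
//   cbz  w0, slow_path   // not resolved yet: call into the runtime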
4484
David Brazdilcb1c0552015-08-04 16:22:25 +01004485static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07004486 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004487}
4488
Alexandre Rames67555f72014-11-18 10:55:16 +00004489void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4490 LocationSummary* locations =
4491 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4492 locations->SetOut(Location::RequiresRegister());
4493}
4494
4495void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004496 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4497}
4498
4499void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4500 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4501}
4502
4503void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4504 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004505}
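
// Both exception accessors compile down to a single TLS access through the
// reserved thread register (a sketch, with wN assumed):
//
//   ldr wN, [tr, #exception_offset]    // VisitLoadException
//   str wzr, [tr, #exception_offset]   // VisitClearException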
4506
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004507HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4508 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004509 switch (desired_string_load_kind) {
4510 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4511 DCHECK(!GetCompilerOptions().GetCompilePic());
4512 break;
4513 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4514 DCHECK(GetCompilerOptions().GetCompilePic());
4515 break;
4516 case HLoadString::LoadKind::kBootImageAddress:
4517 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004518 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01004519 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004520 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004521 case HLoadString::LoadKind::kJitTableAddress:
4522 DCHECK(Runtime::Current()->UseJitCompilation());
4523 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004524 case HLoadString::LoadKind::kDexCacheViaMethod:
4525 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004526 }
4527 return desired_string_load_kind;
4528}
4529
Alexandre Rames67555f72014-11-18 10:55:16 +00004530void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004531 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004532 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004533 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004534 InvokeRuntimeCallingConvention calling_convention;
4535 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
4536 } else {
4537 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004538 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
4539 if (!kUseReadBarrier || kUseBakerReadBarrier) {
4540 // Rely on the pResolveString and/or marking to save everything, including temps.
4541 RegisterSet caller_saves = RegisterSet::Empty();
4542 InvokeRuntimeCallingConvention calling_convention;
4543 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
4544 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
4545 RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
4546 Primitive::kPrimNot).GetCode());
4547 locations->SetCustomSlowPathCallerSaves(caller_saves);
4548 } else {
4549 // For non-Baker read barrier we have a temp-clobbering call.
4550 }
4551 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004552 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004553}
4554
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004555// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4556// move.
4557void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00004558 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004559 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004560
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004561 switch (load->GetLoadKind()) {
4562 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004563 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4564 load->GetStringIndex()));
4565 return; // No dex cache slow path.
4566 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004567 // Add ADRP with its PC-relative String patch.
4568 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004569 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004570 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Scott Wakeling97c72b72016-06-24 16:19:36 +01004571 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004572 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004573 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004574 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004575 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004576 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004577 return; // No dex cache slow path.
4578 }
4579 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004580 uint32_t address = dchecked_integral_cast<uint32_t>(
4581 reinterpret_cast<uintptr_t>(load->GetString().Get()));
4582 DCHECK_NE(address, 0u);
4583 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004584 return; // No dex cache slow path.
4585 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004586 case HLoadString::LoadKind::kBssEntry: {
4587 // Add ADRP with its PC-relative String .bss entry patch.
4588 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004589 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004590 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004591 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4592 Register temp = temps.AcquireX();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004593 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004594 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004595 // Add LDR with its PC-relative String patch.
4596 vixl::aarch64::Label* ldr_label =
4597 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004598 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoaad75c62016-10-03 08:46:48 +00004599 GenerateGcRootFieldLoad(load,
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004600 out_loc,
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004601 temp,
Roland Levillain00468f32016-10-27 18:02:48 +01004602 /* offset placeholder */ 0u,
4603 ldr_label,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004604 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004605 SlowPathCodeARM64* slow_path =
4606 new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load, temp, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004607 codegen_->AddSlowPath(slow_path);
4608 __ Cbz(out.X(), slow_path->GetEntryLabel());
4609 __ Bind(slow_path->GetExitLabel());
4610 return;
4611 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004612 case HLoadString::LoadKind::kJitTableAddress: {
4613 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004614 load->GetStringIndex(),
4615 load->GetString()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004616 GenerateGcRootFieldLoad(load,
4617 out_loc,
4618 out.X(),
4619 /* offset */ 0,
4620 /* fixup_label */ nullptr,
4621 kCompilerReadBarrierOption);
4622 return;
4623 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004624 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004625 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004626 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004627
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004628 // TODO: Re-add the compiler code to do string dex cache lookup.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004629 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004630 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004631 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004632 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
4633 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004634}
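
// The fall-through above ends in the pResolveString runtime call; on this
// path the assumed emission is (w0 doubles as argument and return value):
//
//   mov w0, #string_index
//   ldr lr, [tr, #pResolveString_entrypoint_offset]
//   blr lr                             // returns the resolved String in w0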
4635
Alexandre Rames5319def2014-10-23 10:03:10 +01004636void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4637 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4638 locations->SetOut(Location::ConstantLocation(constant));
4639}
4640
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004641void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004642 // Will be generated at use site.
4643}
4644
Alexandre Rames67555f72014-11-18 10:55:16 +00004645void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4646 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004647 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004648 InvokeRuntimeCallingConvention calling_convention;
4649 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4650}
4651
4652void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004653 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
4654 instruction,
4655 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004656 if (instruction->IsEnter()) {
4657 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4658 } else {
4659 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4660 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004661}
4662
Alexandre Rames42d641b2014-10-27 14:00:51 +00004663void LocationsBuilderARM64::VisitMul(HMul* mul) {
4664 LocationSummary* locations =
4665 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4666 switch (mul->GetResultType()) {
4667 case Primitive::kPrimInt:
4668 case Primitive::kPrimLong:
4669 locations->SetInAt(0, Location::RequiresRegister());
4670 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004671 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004672 break;
4673
4674 case Primitive::kPrimFloat:
4675 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004676 locations->SetInAt(0, Location::RequiresFpuRegister());
4677 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004678 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004679 break;
4680
4681 default:
4682 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4683 }
4684}
4685
4686void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4687 switch (mul->GetResultType()) {
4688 case Primitive::kPrimInt:
4689 case Primitive::kPrimLong:
4690 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4691 break;
4692
4693 case Primitive::kPrimFloat:
4694 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004695 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004696 break;
4697
4698 default:
4699 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4700 }
4701}
4702
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004703void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4704 LocationSummary* locations =
4705 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4706 switch (neg->GetResultType()) {
4707 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004708 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004709 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004710 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004711 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004712
4713 case Primitive::kPrimFloat:
4714 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004715 locations->SetInAt(0, Location::RequiresFpuRegister());
4716 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004717 break;
4718
4719 default:
4720 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4721 }
4722}
4723
4724void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4725 switch (neg->GetResultType()) {
4726 case Primitive::kPrimInt:
4727 case Primitive::kPrimLong:
4728 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4729 break;
4730
4731 case Primitive::kPrimFloat:
4732 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004733 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004734 break;
4735
4736 default:
4737 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4738 }
4739}
4740
4741void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4742 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004743 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004744 InvokeRuntimeCallingConvention calling_convention;
4745 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004746 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004747 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004748 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004749}
4750
4751void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
4752 LocationSummary* locations = instruction->GetLocations();
4753 InvokeRuntimeCallingConvention calling_convention;
4754 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
4755 DCHECK(type_index.Is(w0));
Andreas Gampea5b09a62016-11-17 15:21:22 -08004756 __ Mov(type_index, instruction->GetTypeIndex().index_);
Roland Levillain4d027112015-07-01 15:41:14 +01004757 // Note: if heap poisoning is enabled, the entry point takes care
4758 // of poisoning the reference.
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004759 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Mathieu Chartiere401d142015-04-22 13:56:20 -07004760 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004761}
4762
Alexandre Rames5319def2014-10-23 10:03:10 +01004763void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4764 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004765 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01004766 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004767 if (instruction->IsStringAlloc()) {
4768 locations->AddTemp(LocationFrom(kArtMethodRegister));
4769 } else {
4770 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00004771 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004772 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4773}
4774
4775void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004776 // Note: if heap poisoning is enabled, the entry point takes care
4777 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004778 if (instruction->IsStringAlloc()) {
4779 // String is allocated through StringFactory. Call NewEmptyString entry point.
4780 Location temp = instruction->GetLocations()->GetTemp(0);
Andreas Gampe542451c2016-07-26 09:02:02 -07004781 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004782 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4783 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4784 __ Blr(lr);
4785 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4786 } else {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004787 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00004788 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00004789 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004790}
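
// For the IsStringAlloc() path above, the emission mirrors the code almost
// line for line (xT assumed for the temp):
//
//   ldr xT, [tr, #pNewEmptyString]     // StringFactory ArtMethod*
//   ldr lr, [xT, #entry_point_from_quick_compiled_code_offset]
//   blr lr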
4791
4792void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4793 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004794 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004795 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004796}
4797
4798void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004799 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004800 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004801 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004802 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004803 break;
4804
4805 default:
4806 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4807 }
4808}
4809
David Brazdil66d126e2015-04-03 16:02:44 +01004810void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4811 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4812 locations->SetInAt(0, Location::RequiresRegister());
4813 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4814}
4815
4816void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004817 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01004818}
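
// Since managed booleans are guaranteed to be 0 or 1, logical negation
// above reduces to a single exclusive-or with 1:
//
//   eor wOut, wIn, #1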
4819
Alexandre Rames5319def2014-10-23 10:03:10 +01004820void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004821 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4822 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01004823}
4824
Calin Juravle2ae48182016-03-16 14:05:09 +00004825void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4826 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004827 return;
4828 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004829
Alexandre Ramesd921d642015-04-16 15:07:16 +01004830 BlockPoolsScope block_pools(GetVIXLAssembler());
4831 Location obj = instruction->GetLocations()->InAt(0);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004832 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
Calin Juravle2ae48182016-03-16 14:05:09 +00004833 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004834}
4835
Calin Juravle2ae48182016-03-16 14:05:09 +00004836void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004837 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004838 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004839
4840 LocationSummary* locations = instruction->GetLocations();
4841 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004842
4843 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004844}
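
// The two null check flavors above, sketched (wObj/xObj assumed to hold
// the reference):
//
//   implicit:  ldr wzr, [xObj]        // faults on null; the SIGSEGV
//                                     // handler raises the NPE
//   explicit:  cbz wObj, slow_path    // NullCheckSlowPathARM64 throws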
4845
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004846void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004847 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004848}
4849
Alexandre Rames67555f72014-11-18 10:55:16 +00004850void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4851 HandleBinaryOp(instruction);
4852}
4853
4854void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4855 HandleBinaryOp(instruction);
4856}
4857
Alexandre Rames3e69f162014-12-10 10:36:50 +00004858void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4859 LOG(FATAL) << "Unreachable";
4860}
4861
4862void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4863 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4864}
4865
Alexandre Rames5319def2014-10-23 10:03:10 +01004866void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4867 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4868 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4869 if (location.IsStackSlot()) {
4870 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4871 } else if (location.IsDoubleStackSlot()) {
4872 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4873 }
4874 locations->SetOut(location);
4875}
4876
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004877void InstructionCodeGeneratorARM64::VisitParameterValue(
4878 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004879 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004880}
4881
4882void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4883 LocationSummary* locations =
4884 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004885 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004886}
4887
4888void InstructionCodeGeneratorARM64::VisitCurrentMethod(
4889 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4890 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01004891}
4892
4893void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
4894 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004895 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004896 locations->SetInAt(i, Location::Any());
4897 }
4898 locations->SetOut(Location::Any());
4899}
4900
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004901void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004902 LOG(FATAL) << "Unreachable";
4903}
4904
Serban Constantinescu02164b32014-11-13 14:05:07 +00004905void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004906 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004907 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004908 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
4909 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004910 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4911
4912 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004913 case Primitive::kPrimInt:
4914 case Primitive::kPrimLong:
4915 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004916 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004917 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4918 break;
4919
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004920 case Primitive::kPrimFloat:
4921 case Primitive::kPrimDouble: {
4922 InvokeRuntimeCallingConvention calling_convention;
4923 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4924 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4925 locations->SetOut(calling_convention.GetReturnLocation(type));
4926
4927 break;
4928 }
4929
Serban Constantinescu02164b32014-11-13 14:05:07 +00004930 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004931 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004932 }
4933}
4934
4935void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4936 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004937
Serban Constantinescu02164b32014-11-13 14:05:07 +00004938 switch (type) {
4939 case Primitive::kPrimInt:
4940 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004941 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004942 break;
4943 }
4944
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004945 case Primitive::kPrimFloat:
4946 case Primitive::kPrimDouble: {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004947 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
4948 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004949 if (type == Primitive::kPrimFloat) {
4950 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4951 } else {
4952 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4953 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004954 break;
4955 }
4956
Serban Constantinescu02164b32014-11-13 14:05:07 +00004957 default:
4958 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004959 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004960 }
4961}
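
// AArch64 has no floating-point remainder instruction, so the float/double
// cases above call out to fmodf/fmod through the quick entrypoints. The
// integral cases expand in GenerateDivRemIntegral roughly to (registers
// assumed):
//
//   sdiv xT, xDividend, xDivisor
//   msub xOut, xT, xDivisor, xDividend   // out = dividend - t * divisor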
4962
Calin Juravle27df7582015-04-17 19:12:31 +01004963void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4964 memory_barrier->SetLocations(nullptr);
4965}
4966
4967void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004968 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01004969}
4970
Alexandre Rames5319def2014-10-23 10:03:10 +01004971void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
4972 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4973 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004974 locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
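    // Aside from the long-to-int and char cases handled below, an integral
    // conversion sign-extends the low min(input_size, result_size) bytes of
    // the input: e.g. a long-to-byte conversion keeps the low 8 bits,
    // sign-extended into the result register.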
    if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (result_type == Primitive::kPrimChar ||
               (input_type == Primitive::kPrimChar && input_size < result_size)) {
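      // `char` is the only unsigned integral type, so conversions to char and
      // widening conversions from char must zero-extend the low 16 bits,
      // which is what Ubfx does here.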
      __ Ubfx(output,
              output.IsX() ? source.X() : source.W(),
              0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
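    // Fcvtzs rounds toward zero, saturates on overflow and maps NaN to 0,
    // which matches the Java semantics of floating-point-to-integral
    // conversions.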
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do; this is removed during preparation for register allocation.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do; this is removed during preparation for register allocation.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Assume at most 16 instructions are generated per HIR instruction on average.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
  // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of HIRs in the graph
  // to make sure the jump-table base stays within reach.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
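  // With kInstructionSize == 4 bytes, this threshold works out to
  // 1 MB / 64 B == 16384 HIR instructions.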

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
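    // Each loop iteration below handles two consecutive case values with a
    // single subtraction: after `Subs temp, temp, 2`, condition `lo` means the
    // value matched case last_index + 1, and `eq` means it matched case
    // last_index + 2.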
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // Compare against the last remaining case value explicitly.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below use at most one blocked register at a time. Since there are two
    // blocked registers, we are free to claim one of them here.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // In the current VIXL implementation, encoding the immediate value for Adr does not require
    // any blocked registers, so we are free to use both VIXL blocked registers to reduce
    // register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
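    // Each jump-table entry is a 32-bit offset from the start of the table;
    // `UXTW, 2` zero-extends the index and scales it by 4 (sizeof(int32_t)).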
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    Register temp_reg = RegisterFrom(maybe_temp, type);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    Register obj,
    uint32_t offset,
    vixl::aarch64::Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  DCHECK(fixup_label == nullptr || offset == 0u);
  Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      if (fixup_label == nullptr) {
        __ Ldr(root_reg, MemOperand(obj, offset));
      } else {
        codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj);
      }
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      Register temp = lr;

      // Slow path marking the GC root `root`. The entrypoint will already be loaded in `temp`.
      SlowPathCodeARM64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction,
                                                                    root,
                                                                    LocationFrom(temp));
      codegen_->AddSlowPath(slow_path);
      const int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(root.reg());
      // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      // Loading the entrypoint does not require a load acquire since it is only changed when
      // threads are suspended or running a checkpoint.
      __ Ldr(temp, MemOperand(tr, entry_point_offset));
      // The entrypoint is null when the GC is not marking; checking it directly saves a load
      // compared to testing GetIsGcMarking.
      __ Cbnz(temp, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      if (fixup_label == nullptr) {
        __ Add(root_reg.X(), obj.X(), offset);
      } else {
        codegen_->EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    if (fixup_label == nullptr) {
      __ Ldr(root_reg, MemOperand(obj, offset));
    } else {
      codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t offset,
                                                               Register temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  size_t no_scale_factor = 0u;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Array cells are never volatile variables, therefore array loads
  // never use Load-Acquire instructions on ARM64.
  const bool use_load_acquire = false;

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   size_t scale_factor,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire,
                                                                   bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If we are emitting an array load, we should not be using a
  // Load Acquire instruction. In other words:
  // `instruction->IsArrayGet()` => `!use_load_acquire`.
  DCHECK(!instruction->IsArrayGet() || !use_load_acquire);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // `obj` is unchanged by this operation, but its value now depends
  // on `temp`.
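  // The addition below is a no-op value-wise: `temp` was written by a 32-bit
  // load, so bits [63:32] of temp.X() are zero and `temp.X() LSR 32` is zero.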
  __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0u);
      DCHECK_EQ(scale_factor, 0u);
      DCHECK(!needs_null_check);
      // /* HeapReference<Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject intrinsics cases.
      // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
      } else {
        Register temp3 = temps.AcquireW();
        __ Add(temp3, obj, offset);
        Load(type, ref_reg, HeapOperand(temp3, XRegisterFrom(index), LSL, scale_factor));
        temps.Release(temp3);
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathARM64 only supports addresses
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be zero in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, 0u);  /* "times 1" */
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathARM64(
        instruction, ref, obj, /* field_offset */ index, temp);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
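  // For a vtable lookup, the method pointer is loaded directly from the
  // class's embedded vtable; for an IMT lookup, the IMT pointer is loaded
  // from the class first and then indexed.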
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}

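// Overwrite the 32-bit placeholder literal emitted for a JIT root with the
// address of the root's slot in the JIT roots table, so that the compiled
// code can load the root through that slot.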
static void PatchJitRootUse(uint8_t* code,
                            const uint8_t* roots_data,
                            vixl::aarch64::Literal<uint32_t>* literal,
                            uint64_t index_in_table) {
  uint32_t literal_offset = literal->GetOffset();
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint8_t* data = code + literal_offset;
  reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
}

void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const auto& it = jit_string_roots_.find(entry.first);
    DCHECK(it != jit_string_roots_.end());
    PatchJitRootUse(code, roots_data, entry.second, it->second);
  }
  for (const auto& entry : jit_class_patches_) {
    const auto& it = jit_class_roots_.find(entry.first);
    DCHECK(it != jit_class_roots_.end());
    PatchJitRootUse(code, roots_data, entry.second, it->second);
  }
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art