blob: c978aaa81d0dd69d2873cca48c5bba464c5884de [file] [log] [blame]
Alexandre Rames5319def2014-10-23 10:03:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm64.h"
18
Serban Constantinescu579885a2015-02-22 20:51:33 +000019#include "arch/arm64/instruction_set_features_arm64.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method.h"
Zheng Xuc6667102015-05-15 16:08:45 +080021#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000022#include "compiled_method.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010023#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe1cc7dba2014-12-17 18:43:01 -080024#include "entrypoints/quick/quick_entrypoints_enum.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010025#include "gc/accounting/card_table.h"
Andreas Gampe878d58c2015-01-15 23:24:00 -080026#include "intrinsics.h"
27#include "intrinsics_arm64.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010028#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070029#include "mirror/class-inl.h"
Calin Juravlecd6dffe2015-01-08 17:35:35 +000030#include "offsets.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010031#include "thread.h"
32#include "utils/arm64/assembler_arm64.h"
33#include "utils/assembler.h"
34#include "utils/stack_checks.h"
35
36
37using namespace vixl; // NOLINT(build/namespaces)
38
39#ifdef __
40#error "ARM64 Codegen VIXL macro-assembler macro already defined."
41#endif
42
Alexandre Rames5319def2014-10-23 10:03:10 +010043namespace art {
44
Roland Levillain22ccc3a2015-11-24 13:10:05 +000045template<class MirrorType>
46class GcRoot;
47
Alexandre Rames5319def2014-10-23 10:03:10 +010048namespace arm64 {
49
Andreas Gampe878d58c2015-01-15 23:24:00 -080050using helpers::CPURegisterFrom;
51using helpers::DRegisterFrom;
52using helpers::FPRegisterFrom;
53using helpers::HeapOperand;
54using helpers::HeapOperandFrom;
55using helpers::InputCPURegisterAt;
56using helpers::InputFPRegisterAt;
57using helpers::InputRegisterAt;
58using helpers::InputOperandAt;
59using helpers::Int64ConstantFrom;
Andreas Gampe878d58c2015-01-15 23:24:00 -080060using helpers::LocationFrom;
61using helpers::OperandFromMemOperand;
62using helpers::OutputCPURegister;
63using helpers::OutputFPRegister;
64using helpers::OutputRegister;
65using helpers::RegisterFrom;
66using helpers::StackOperandFrom;
67using helpers::VIXLRegCodeFromART;
68using helpers::WRegisterFrom;
69using helpers::XRegisterFrom;
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +000070using helpers::ARM64EncodableConstantOrRegister;
Zheng Xuda403092015-04-24 17:35:39 +080071using helpers::ArtVixlRegCodeCoherentForRegSet;
Andreas Gampe878d58c2015-01-15 23:24:00 -080072
Alexandre Rames5319def2014-10-23 10:03:10 +010073static constexpr int kCurrentMethodStackOffset = 0;
Vladimir Markof3e0ee22015-12-17 15:23:13 +000074// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions. While jump
Zheng Xu3927c8b2015-11-18 17:46:25 +080075// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
76// generates less code/data with a small num_entries.
Vladimir Markof3e0ee22015-12-17 15:23:13 +000077static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
Alexandre Rames5319def2014-10-23 10:03:10 +010078
Alexandre Rames5319def2014-10-23 10:03:10 +010079inline Condition ARM64Condition(IfCondition cond) {
80 switch (cond) {
81 case kCondEQ: return eq;
82 case kCondNE: return ne;
83 case kCondLT: return lt;
84 case kCondLE: return le;
85 case kCondGT: return gt;
86 case kCondGE: return ge;
Aart Bike9f37602015-10-09 11:15:55 -070087 case kCondB: return lo;
88 case kCondBE: return ls;
89 case kCondA: return hi;
90 case kCondAE: return hs;
Alexandre Rames5319def2014-10-23 10:03:10 +010091 }
Roland Levillain7f63c522015-07-13 15:54:55 +000092 LOG(FATAL) << "Unreachable";
93 UNREACHABLE();
Alexandre Rames5319def2014-10-23 10:03:10 +010094}
95
Vladimir Markod6e069b2016-01-18 11:11:01 +000096inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
97 // The ARM64 condition codes can express all the necessary branches, see the
98 // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
99 // There is no dex instruction or HIR that would need the missing conditions
100 // "equal or unordered" or "not equal".
101 switch (cond) {
102 case kCondEQ: return eq;
103 case kCondNE: return ne /* unordered */;
104 case kCondLT: return gt_bias ? cc : lt /* unordered */;
105 case kCondLE: return gt_bias ? ls : le /* unordered */;
106 case kCondGT: return gt_bias ? hi /* unordered */ : gt;
107 case kCondGE: return gt_bias ? cs /* unordered */ : ge;
108 default:
109 LOG(FATAL) << "UNREACHABLE";
110 UNREACHABLE();
111 }
112}
113
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000114Location ARM64ReturnLocation(Primitive::Type return_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000115 // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
116 // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
117 // but we use the exact registers for clarity.
118 if (return_type == Primitive::kPrimFloat) {
119 return LocationFrom(s0);
120 } else if (return_type == Primitive::kPrimDouble) {
121 return LocationFrom(d0);
122 } else if (return_type == Primitive::kPrimLong) {
123 return LocationFrom(x0);
Nicolas Geoffray925e5622015-06-03 12:23:32 +0100124 } else if (return_type == Primitive::kPrimVoid) {
125 return Location::NoLocation();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000126 } else {
127 return LocationFrom(w0);
128 }
129}
130
Alexandre Rames5319def2014-10-23 10:03:10 +0100131Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000132 return ARM64ReturnLocation(return_type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100133}
134
Alexandre Rames67555f72014-11-18 10:55:16 +0000135#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
136#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()
Alexandre Rames5319def2014-10-23 10:03:10 +0100137
Zheng Xuda403092015-04-24 17:35:39 +0800138// Calculate memory accessing operand for save/restore live registers.
139static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
140 RegisterSet* register_set,
141 int64_t spill_offset,
142 bool is_save) {
143 DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
144 codegen->GetNumberOfCoreRegisters(),
145 register_set->GetFloatingPointRegisters(),
146 codegen->GetNumberOfFloatingPointRegisters()));
147
148 CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
149 register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
Nicolas Geoffray75d5b9b2015-10-05 07:40:35 +0000150 CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
151 register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));
Zheng Xuda403092015-04-24 17:35:39 +0800152
153 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
154 UseScratchRegisterScope temps(masm);
155
156 Register base = masm->StackPointer();
157 int64_t core_spill_size = core_list.TotalSizeInBytes();
158 int64_t fp_spill_size = fp_list.TotalSizeInBytes();
159 int64_t reg_size = kXRegSizeInBytes;
160 int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
161 uint32_t ls_access_size = WhichPowerOf2(reg_size);
162 if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
163 !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
164 // If the offset does not fit in the instruction's immediate field, use an alternate register
165 // to compute the base address(float point registers spill base address).
166 Register new_base = temps.AcquireSameSizeAs(base);
167 __ Add(new_base, base, Operand(spill_offset + core_spill_size));
168 base = new_base;
169 spill_offset = -core_spill_size;
170 int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
171 DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
172 DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
173 }
174
175 if (is_save) {
176 __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
177 __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
178 } else {
179 __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
180 __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
181 }
182}
183
184void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
185 RegisterSet* register_set = locations->GetLiveRegisters();
186 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
187 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
188 if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
189 // If the register holds an object, update the stack mask.
190 if (locations->RegisterContainsObject(i)) {
191 locations->SetStackBit(stack_offset / kVRegSize);
192 }
193 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
194 DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
195 saved_core_stack_offsets_[i] = stack_offset;
196 stack_offset += kXRegSizeInBytes;
197 }
198 }
199
200 for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
201 if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
202 register_set->ContainsFloatingPointRegister(i)) {
203 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
204 DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
205 saved_fpu_stack_offsets_[i] = stack_offset;
206 stack_offset += kDRegSizeInBytes;
207 }
208 }
209
210 SaveRestoreLiveRegistersHelper(codegen, register_set,
211 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
212}
213
214void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
215 RegisterSet* register_set = locations->GetLiveRegisters();
216 SaveRestoreLiveRegistersHelper(codegen, register_set,
217 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
218}
219
Alexandre Rames5319def2014-10-23 10:03:10 +0100220class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
221 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000222 explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}
Alexandre Rames5319def2014-10-23 10:03:10 +0100223
Alexandre Rames67555f72014-11-18 10:55:16 +0000224 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100225 LocationSummary* locations = instruction_->GetLocations();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000226 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100227
Alexandre Rames5319def2014-10-23 10:03:10 +0100228 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000229 if (instruction_->CanThrowIntoCatchBlock()) {
230 // Live registers will be restored in the catch block if caught.
231 SaveLiveRegisters(codegen, instruction_->GetLocations());
232 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000233 // We're moving two locations to locations that could overlap, so we need a parallel
234 // move resolver.
235 InvokeRuntimeCallingConvention calling_convention;
236 codegen->EmitParallelMoves(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100237 locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
238 locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
Alexandre Rames3e69f162014-12-10 10:36:50 +0000239 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000240 QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800241 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Alexandre Rames5319def2014-10-23 10:03:10 +0100242 }
243
Alexandre Rames8158f282015-08-07 10:26:17 +0100244 bool IsFatal() const OVERRIDE { return true; }
245
Alexandre Rames9931f312015-06-19 14:47:01 +0100246 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }
247
Alexandre Rames5319def2014-10-23 10:03:10 +0100248 private:
Alexandre Rames5319def2014-10-23 10:03:10 +0100249 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
250};
251
Alexandre Rames67555f72014-11-18 10:55:16 +0000252class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
253 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000254 explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}
Alexandre Rames67555f72014-11-18 10:55:16 +0000255
256 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
257 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
258 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000259 if (instruction_->CanThrowIntoCatchBlock()) {
260 // Live registers will be restored in the catch block if caught.
261 SaveLiveRegisters(codegen, instruction_->GetLocations());
262 }
Alexandre Rames67555f72014-11-18 10:55:16 +0000263 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000264 QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800265 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000266 }
267
Alexandre Rames8158f282015-08-07 10:26:17 +0100268 bool IsFatal() const OVERRIDE { return true; }
269
Alexandre Rames9931f312015-06-19 14:47:01 +0100270 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }
271
Alexandre Rames67555f72014-11-18 10:55:16 +0000272 private:
Alexandre Rames67555f72014-11-18 10:55:16 +0000273 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
274};
275
276class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
277 public:
278 LoadClassSlowPathARM64(HLoadClass* cls,
279 HInstruction* at,
280 uint32_t dex_pc,
281 bool do_clinit)
David Srbecky9cd6d372016-02-09 15:24:47 +0000282 : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
Alexandre Rames67555f72014-11-18 10:55:16 +0000283 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
284 }
285
286 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
287 LocationSummary* locations = at_->GetLocations();
288 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
289
290 __ Bind(GetEntryLabel());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000291 SaveLiveRegisters(codegen, locations);
Alexandre Rames67555f72014-11-18 10:55:16 +0000292
293 InvokeRuntimeCallingConvention calling_convention;
294 __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +0000295 int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
296 : QUICK_ENTRY_POINT(pInitializeType);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000297 arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800298 if (do_clinit_) {
Vladimir Marko5ea536a2015-04-20 20:11:30 +0100299 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800300 } else {
Vladimir Marko5ea536a2015-04-20 20:11:30 +0100301 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800302 }
Alexandre Rames67555f72014-11-18 10:55:16 +0000303
304 // Move the class to the desired location.
305 Location out = locations->Out();
306 if (out.IsValid()) {
307 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
308 Primitive::Type type = at_->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000309 arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
Alexandre Rames67555f72014-11-18 10:55:16 +0000310 }
311
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000312 RestoreLiveRegisters(codegen, locations);
Alexandre Rames67555f72014-11-18 10:55:16 +0000313 __ B(GetExitLabel());
314 }
315
Alexandre Rames9931f312015-06-19 14:47:01 +0100316 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }
317
Alexandre Rames67555f72014-11-18 10:55:16 +0000318 private:
319 // The class this slow path will load.
320 HLoadClass* const cls_;
321
322 // The instruction where this slow path is happening.
323 // (Might be the load class or an initialization check).
324 HInstruction* const at_;
325
326 // The dex PC of `at_`.
327 const uint32_t dex_pc_;
328
329 // Whether to initialize the class.
330 const bool do_clinit_;
331
332 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
333};
334
335class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
336 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000337 explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}
Alexandre Rames67555f72014-11-18 10:55:16 +0000338
339 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
340 LocationSummary* locations = instruction_->GetLocations();
341 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
342 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
343
344 __ Bind(GetEntryLabel());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000345 SaveLiveRegisters(codegen, locations);
Alexandre Rames67555f72014-11-18 10:55:16 +0000346
347 InvokeRuntimeCallingConvention calling_convention;
David Srbecky9cd6d372016-02-09 15:24:47 +0000348 const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
349 __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
Alexandre Rames67555f72014-11-18 10:55:16 +0000350 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000351 QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
Vladimir Marko5ea536a2015-04-20 20:11:30 +0100352 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000353 Primitive::Type type = instruction_->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000354 arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);
Alexandre Rames67555f72014-11-18 10:55:16 +0000355
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000356 RestoreLiveRegisters(codegen, locations);
Alexandre Rames67555f72014-11-18 10:55:16 +0000357 __ B(GetExitLabel());
358 }
359
Alexandre Rames9931f312015-06-19 14:47:01 +0100360 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }
361
Alexandre Rames67555f72014-11-18 10:55:16 +0000362 private:
Alexandre Rames67555f72014-11-18 10:55:16 +0000363 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
364};
365
Alexandre Rames5319def2014-10-23 10:03:10 +0100366class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
367 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000368 explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}
Alexandre Rames5319def2014-10-23 10:03:10 +0100369
Alexandre Rames67555f72014-11-18 10:55:16 +0000370 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
371 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100372 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000373 if (instruction_->CanThrowIntoCatchBlock()) {
374 // Live registers will be restored in the catch block if caught.
375 SaveLiveRegisters(codegen, instruction_->GetLocations());
376 }
Alexandre Rames67555f72014-11-18 10:55:16 +0000377 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000378 QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800379 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Alexandre Rames5319def2014-10-23 10:03:10 +0100380 }
381
Alexandre Rames8158f282015-08-07 10:26:17 +0100382 bool IsFatal() const OVERRIDE { return true; }
383
Alexandre Rames9931f312015-06-19 14:47:01 +0100384 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }
385
Alexandre Rames5319def2014-10-23 10:03:10 +0100386 private:
Alexandre Rames5319def2014-10-23 10:03:10 +0100387 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
388};
389
390class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
391 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100392 SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000393 : SlowPathCodeARM64(instruction), successor_(successor) {}
Alexandre Rames5319def2014-10-23 10:03:10 +0100394
Alexandre Rames67555f72014-11-18 10:55:16 +0000395 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
396 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100397 __ Bind(GetEntryLabel());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000398 SaveLiveRegisters(codegen, instruction_->GetLocations());
Alexandre Rames67555f72014-11-18 10:55:16 +0000399 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000400 QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800401 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000402 RestoreLiveRegisters(codegen, instruction_->GetLocations());
Alexandre Rames67555f72014-11-18 10:55:16 +0000403 if (successor_ == nullptr) {
404 __ B(GetReturnLabel());
405 } else {
406 __ B(arm64_codegen->GetLabelOf(successor_));
407 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100408 }
409
410 vixl::Label* GetReturnLabel() {
411 DCHECK(successor_ == nullptr);
412 return &return_label_;
413 }
414
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100415 HBasicBlock* GetSuccessor() const {
416 return successor_;
417 }
418
Alexandre Rames9931f312015-06-19 14:47:01 +0100419 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }
420
Alexandre Rames5319def2014-10-23 10:03:10 +0100421 private:
Alexandre Rames5319def2014-10-23 10:03:10 +0100422 // If not null, the block to branch to after the suspend check.
423 HBasicBlock* const successor_;
424
425 // If `successor_` is null, the label to branch to after the suspend check.
426 vixl::Label return_label_;
427
428 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
429};
430
Alexandre Rames67555f72014-11-18 10:55:16 +0000431class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
432 public:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000433 TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
David Srbecky9cd6d372016-02-09 15:24:47 +0000434 : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}
Alexandre Rames67555f72014-11-18 10:55:16 +0000435
436 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000437 LocationSummary* locations = instruction_->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100438 Location class_to_check = locations->InAt(1);
439 Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
440 : locations->Out();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000441 DCHECK(instruction_->IsCheckCast()
442 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
443 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100444 uint32_t dex_pc = instruction_->GetDexPc();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000445
Alexandre Rames67555f72014-11-18 10:55:16 +0000446 __ Bind(GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000447
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000448 if (!is_fatal_) {
449 SaveLiveRegisters(codegen, locations);
450 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000451
452 // We're moving two locations to locations that could overlap, so we need a parallel
453 // move resolver.
454 InvokeRuntimeCallingConvention calling_convention;
455 codegen->EmitParallelMoves(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100456 class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
457 object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);
Alexandre Rames3e69f162014-12-10 10:36:50 +0000458
459 if (instruction_->IsInstanceOf()) {
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000460 arm64_codegen->InvokeRuntime(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100461 QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
Roland Levillain888d0672015-11-23 18:53:50 +0000462 CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
463 const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000464 Primitive::Type ret_type = instruction_->GetType();
465 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
466 arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
467 } else {
468 DCHECK(instruction_->IsCheckCast());
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100469 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800470 CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000471 }
472
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000473 if (!is_fatal_) {
474 RestoreLiveRegisters(codegen, locations);
475 __ B(GetExitLabel());
476 }
Alexandre Rames67555f72014-11-18 10:55:16 +0000477 }
478
Alexandre Rames9931f312015-06-19 14:47:01 +0100479 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000480 bool IsFatal() const { return is_fatal_; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100481
Alexandre Rames67555f72014-11-18 10:55:16 +0000482 private:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000483 const bool is_fatal_;
Alexandre Rames3e69f162014-12-10 10:36:50 +0000484
Alexandre Rames67555f72014-11-18 10:55:16 +0000485 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
486};
487
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700488class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
489 public:
Aart Bik42249c32016-01-07 15:33:50 -0800490 explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000491 : SlowPathCodeARM64(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700492
493 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800494 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700495 __ Bind(GetEntryLabel());
496 SaveLiveRegisters(codegen, instruction_->GetLocations());
Aart Bik42249c32016-01-07 15:33:50 -0800497 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
498 instruction_,
499 instruction_->GetDexPc(),
500 this);
Roland Levillain888d0672015-11-23 18:53:50 +0000501 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700502 }
503
Alexandre Rames9931f312015-06-19 14:47:01 +0100504 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }
505
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700506 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700507 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
508};
509
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100510class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
511 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000512 explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100513
514 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
515 LocationSummary* locations = instruction_->GetLocations();
516 __ Bind(GetEntryLabel());
517 SaveLiveRegisters(codegen, locations);
518
519 InvokeRuntimeCallingConvention calling_convention;
520 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
521 parallel_move.AddMove(
522 locations->InAt(0),
523 LocationFrom(calling_convention.GetRegisterAt(0)),
524 Primitive::kPrimNot,
525 nullptr);
526 parallel_move.AddMove(
527 locations->InAt(1),
528 LocationFrom(calling_convention.GetRegisterAt(1)),
529 Primitive::kPrimInt,
530 nullptr);
531 parallel_move.AddMove(
532 locations->InAt(2),
533 LocationFrom(calling_convention.GetRegisterAt(2)),
534 Primitive::kPrimNot,
535 nullptr);
536 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
537
538 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
539 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
540 instruction_,
541 instruction_->GetDexPc(),
542 this);
543 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
544 RestoreLiveRegisters(codegen, locations);
545 __ B(GetExitLabel());
546 }
547
548 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }
549
550 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100551 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
552};
553
Zheng Xu3927c8b2015-11-18 17:46:25 +0800554void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
555 uint32_t num_entries = switch_instr_->GetNumEntries();
Vladimir Markof3e0ee22015-12-17 15:23:13 +0000556 DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);
Zheng Xu3927c8b2015-11-18 17:46:25 +0800557
558 // We are about to use the assembler to place literals directly. Make sure we have enough
559 // underlying code buffer and we have generated the jump table with right size.
560 CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
561 CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);
562
563 __ Bind(&table_start_);
564 const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
565 for (uint32_t i = 0; i < num_entries; i++) {
566 vixl::Label* target_label = codegen->GetLabelOf(successors[i]);
567 DCHECK(target_label->IsBound());
568 ptrdiff_t jump_offset = target_label->location() - table_start_.location();
569 DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
570 DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
571 Literal<int32_t> literal(jump_offset);
572 __ place(&literal);
573 }
574}
575
Roland Levillain44015862016-01-22 11:47:17 +0000576// Slow path marking an object during a read barrier.
577class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
578 public:
579 ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location out, Location obj)
David Srbecky9cd6d372016-02-09 15:24:47 +0000580 : SlowPathCodeARM64(instruction), out_(out), obj_(obj) {
Roland Levillain44015862016-01-22 11:47:17 +0000581 DCHECK(kEmitCompilerReadBarrier);
582 }
583
584 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }
585
586 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
587 LocationSummary* locations = instruction_->GetLocations();
588 Primitive::Type type = Primitive::kPrimNot;
589 DCHECK(locations->CanCall());
590 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
591 DCHECK(instruction_->IsInstanceFieldGet() ||
592 instruction_->IsStaticFieldGet() ||
593 instruction_->IsArrayGet() ||
594 instruction_->IsLoadClass() ||
595 instruction_->IsLoadString() ||
596 instruction_->IsInstanceOf() ||
597 instruction_->IsCheckCast())
598 << "Unexpected instruction in read barrier marking slow path: "
599 << instruction_->DebugName();
600
601 __ Bind(GetEntryLabel());
602 SaveLiveRegisters(codegen, locations);
603
604 InvokeRuntimeCallingConvention calling_convention;
605 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
606 arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)), obj_, type);
607 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
608 instruction_,
609 instruction_->GetDexPc(),
610 this);
611 CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
612 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
613
614 RestoreLiveRegisters(codegen, locations);
615 __ B(GetExitLabel());
616 }
617
618 private:
Roland Levillain44015862016-01-22 11:47:17 +0000619 const Location out_;
620 const Location obj_;
621
622 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
623};
624
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000625// Slow path generating a read barrier for a heap reference.
626class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
627 public:
628 ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
629 Location out,
630 Location ref,
631 Location obj,
632 uint32_t offset,
633 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000634 : SlowPathCodeARM64(instruction),
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000635 out_(out),
636 ref_(ref),
637 obj_(obj),
638 offset_(offset),
639 index_(index) {
640 DCHECK(kEmitCompilerReadBarrier);
641 // If `obj` is equal to `out` or `ref`, it means the initial object
642 // has been overwritten by (or after) the heap object reference load
643 // to be instrumented, e.g.:
644 //
645 // __ Ldr(out, HeapOperand(out, class_offset);
Roland Levillain44015862016-01-22 11:47:17 +0000646 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000647 //
648 // In that case, we have lost the information about the original
649 // object, and the emitted read barrier cannot work properly.
650 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
651 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
652 }
653
654 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
655 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
656 LocationSummary* locations = instruction_->GetLocations();
657 Primitive::Type type = Primitive::kPrimNot;
658 DCHECK(locations->CanCall());
659 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
660 DCHECK(!instruction_->IsInvoke() ||
661 (instruction_->IsInvokeStaticOrDirect() &&
Roland Levillain44015862016-01-22 11:47:17 +0000662 instruction_->GetLocations()->Intrinsified()))
663 << "Unexpected instruction in read barrier for heap reference slow path: "
664 << instruction_->DebugName();
Roland Levillaincd3d0fb2016-01-15 19:26:48 +0000665 // The read barrier instrumentation does not support the
666 // HArm64IntermediateAddress instruction yet.
667 DCHECK(!(instruction_->IsArrayGet() &&
668 instruction_->AsArrayGet()->GetArray()->IsArm64IntermediateAddress()));
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000669
670 __ Bind(GetEntryLabel());
671
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000672 SaveLiveRegisters(codegen, locations);
673
674 // We may have to change the index's value, but as `index_` is a
675 // constant member (like other "inputs" of this slow path),
676 // introduce a copy of it, `index`.
677 Location index = index_;
678 if (index_.IsValid()) {
679 // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
680 if (instruction_->IsArrayGet()) {
681 // Compute the actual memory offset and store it in `index`.
682 Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
683 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
684 if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
685 // We are about to change the value of `index_reg` (see the
686 // calls to vixl::MacroAssembler::Lsl and
687 // vixl::MacroAssembler::Mov below), but it has
688 // not been saved by the previous call to
689 // art::SlowPathCode::SaveLiveRegisters, as it is a
690 // callee-save register --
691 // art::SlowPathCode::SaveLiveRegisters does not consider
692 // callee-save registers, as it has been designed with the
693 // assumption that callee-save registers are supposed to be
694 // handled by the called function. So, as a callee-save
695 // register, `index_reg` _would_ eventually be saved onto
696 // the stack, but it would be too late: we would have
697 // changed its value earlier. Therefore, we manually save
698 // it here into another freely available register,
699 // `free_reg`, chosen of course among the caller-save
700 // registers (as a callee-save `free_reg` register would
701 // exhibit the same problem).
702 //
703 // Note we could have requested a temporary register from
704 // the register allocator instead; but we prefer not to, as
705 // this is a slow path, and we know we can find a
706 // caller-save register that is available.
707 Register free_reg = FindAvailableCallerSaveRegister(codegen);
708 __ Mov(free_reg.W(), index_reg);
709 index_reg = free_reg;
710 index = LocationFrom(index_reg);
711 } else {
712 // The initial register stored in `index_` has already been
713 // saved in the call to art::SlowPathCode::SaveLiveRegisters
714 // (as it is not a callee-save register), so we can freely
715 // use it.
716 }
717 // Shifting the index value contained in `index_reg` by the scale
718 // factor (2) cannot overflow in practice, as the runtime is
719 // unable to allocate object arrays with a size larger than
720 // 2^26 - 1 (that is, 2^28 - 4 bytes).
721 __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
722 static_assert(
723 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
724 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
725 __ Add(index_reg, index_reg, Operand(offset_));
726 } else {
727 DCHECK(instruction_->IsInvoke());
728 DCHECK(instruction_->GetLocations()->Intrinsified());
729 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
730 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
731 << instruction_->AsInvoke()->GetIntrinsic();
732 DCHECK_EQ(offset_, 0U);
733 DCHECK(index_.IsRegisterPair());
734 // UnsafeGet's offset location is a register pair, the low
735 // part contains the correct offset.
736 index = index_.ToLow();
737 }
738 }
739
740 // We're moving two or three locations to locations that could
741 // overlap, so we need a parallel move resolver.
742 InvokeRuntimeCallingConvention calling_convention;
743 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
744 parallel_move.AddMove(ref_,
745 LocationFrom(calling_convention.GetRegisterAt(0)),
746 type,
747 nullptr);
748 parallel_move.AddMove(obj_,
749 LocationFrom(calling_convention.GetRegisterAt(1)),
750 type,
751 nullptr);
752 if (index.IsValid()) {
753 parallel_move.AddMove(index,
754 LocationFrom(calling_convention.GetRegisterAt(2)),
755 Primitive::kPrimInt,
756 nullptr);
757 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
758 } else {
759 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
760 arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
761 }
762 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
763 instruction_,
764 instruction_->GetDexPc(),
765 this);
766 CheckEntrypointTypes<
767 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
768 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
769
770 RestoreLiveRegisters(codegen, locations);
771
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000772 __ B(GetExitLabel());
773 }
774
775 const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }
776
777 private:
778 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
779 size_t ref = static_cast<int>(XRegisterFrom(ref_).code());
780 size_t obj = static_cast<int>(XRegisterFrom(obj_).code());
781 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
782 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
783 return Register(VIXLRegCodeFromART(i), kXRegSize);
784 }
785 }
786 // We shall never fail to find a free caller-save register, as
787 // there are more than two core caller-save registers on ARM64
788 // (meaning it is possible to find one which is different from
789 // `ref` and `obj`).
790 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
791 LOG(FATAL) << "Could not find a free register";
792 UNREACHABLE();
793 }
794
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000795 const Location out_;
796 const Location ref_;
797 const Location obj_;
798 const uint32_t offset_;
799 // An additional location containing an index to an array.
800 // Only used for HArrayGet and the UnsafeGetObject &
801 // UnsafeGetObjectVolatile intrinsics.
802 const Location index_;
803
804 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
805};
806
807// Slow path generating a read barrier for a GC root.
808class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
809 public:
810 ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +0000811 : SlowPathCodeARM64(instruction), out_(out), root_(root) {
Roland Levillain44015862016-01-22 11:47:17 +0000812 DCHECK(kEmitCompilerReadBarrier);
813 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000814
815 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
816 LocationSummary* locations = instruction_->GetLocations();
817 Primitive::Type type = Primitive::kPrimNot;
818 DCHECK(locations->CanCall());
819 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain44015862016-01-22 11:47:17 +0000820 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
821 << "Unexpected instruction in read barrier for GC root slow path: "
822 << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000823
824 __ Bind(GetEntryLabel());
825 SaveLiveRegisters(codegen, locations);
826
827 InvokeRuntimeCallingConvention calling_convention;
828 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
829 // The argument of the ReadBarrierForRootSlow is not a managed
830 // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
831 // thus we need a 64-bit move here, and we cannot use
832 //
833 // arm64_codegen->MoveLocation(
834 // LocationFrom(calling_convention.GetRegisterAt(0)),
835 // root_,
836 // type);
837 //
838 // which would emit a 32-bit move, as `type` is a (32-bit wide)
839 // reference type (`Primitive::kPrimNot`).
840 __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
841 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
842 instruction_,
843 instruction_->GetDexPc(),
844 this);
845 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
846 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
847
848 RestoreLiveRegisters(codegen, locations);
849 __ B(GetExitLabel());
850 }
851
852 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }
853
854 private:
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000855 const Location out_;
856 const Location root_;
857
858 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
859};
860
Alexandre Rames5319def2014-10-23 10:03:10 +0100861#undef __
862
Roland Levillain2d27c8e2015-04-28 15:48:45 +0100863Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
Alexandre Rames5319def2014-10-23 10:03:10 +0100864 Location next_location;
865 if (type == Primitive::kPrimVoid) {
866 LOG(FATAL) << "Unreachable type " << type;
867 }
868
Alexandre Rames542361f2015-01-29 16:57:31 +0000869 if (Primitive::IsFloatingPointType(type) &&
Roland Levillain2d27c8e2015-04-28 15:48:45 +0100870 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
871 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
Alexandre Rames542361f2015-01-29 16:57:31 +0000872 } else if (!Primitive::IsFloatingPointType(type) &&
873 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000874 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
875 } else {
876 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Alexandre Rames542361f2015-01-29 16:57:31 +0000877 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
878 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +0100879 }
880
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000881 // Space on the stack is reserved for all arguments.
Alexandre Rames542361f2015-01-29 16:57:31 +0000882 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +0100883 return next_location;
884}
885
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +0100886Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +0100887 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +0100888}
889
Serban Constantinescu579885a2015-02-22 20:51:33 +0000890CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
891 const Arm64InstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +0100892 const CompilerOptions& compiler_options,
893 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +0100894 : CodeGenerator(graph,
895 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000896 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +0000897 kNumberOfAllocatableRegisterPairs,
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000898 callee_saved_core_registers.list(),
Nicolas Geoffray75d5b9b2015-10-05 07:40:35 +0000899 callee_saved_fp_registers.list(),
Serban Constantinescuecc43662015-08-13 13:33:12 +0100900 compiler_options,
901 stats),
Alexandre Rames5319def2014-10-23 10:03:10 +0100902 block_labels_(nullptr),
Zheng Xu3927c8b2015-11-18 17:46:25 +0800903 jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +0100904 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +0000905 instruction_visitor_(graph, this),
Serban Constantinescu579885a2015-02-22 20:51:33 +0000906 move_resolver_(graph->GetArena(), this),
Vladimir Marko58155012015-08-19 12:49:41 +0000907 isa_features_(isa_features),
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000908 uint32_literals_(std::less<uint32_t>(),
909 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +0100910 uint64_literals_(std::less<uint64_t>(),
911 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
912 method_patches_(MethodReferenceComparator(),
913 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
914 call_patches_(MethodReferenceComparator(),
915 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
916 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000917 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
918 boot_image_string_patches_(StringReferenceValueComparator(),
919 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
920 pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
921 boot_image_address_patches_(std::less<uint32_t>(),
922 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000923 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000924 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000925}
Alexandre Rames5319def2014-10-23 10:03:10 +0100926
Alexandre Rames67555f72014-11-18 10:55:16 +0000927#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +0100928
Zheng Xu3927c8b2015-11-18 17:46:25 +0800929void CodeGeneratorARM64::EmitJumpTables() {
930 for (auto jump_table : jump_tables_) {
931 jump_table->EmitTable(this);
932 }
933}
934
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000935void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +0800936 EmitJumpTables();
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000937 // Ensure we emit the literal pool.
938 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +0000939
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000940 CodeGenerator::Finalize(allocator);
941}
942
Zheng Xuad4450e2015-04-17 18:48:56 +0800943void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
944 // Note: There are 6 kinds of moves:
945 // 1. constant -> GPR/FPR (non-cycle)
946 // 2. constant -> stack (non-cycle)
947 // 3. GPR/FPR -> GPR/FPR
948 // 4. GPR/FPR -> stack
949 // 5. stack -> GPR/FPR
950 // 6. stack -> stack (non-cycle)
951 // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
952 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
953 // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
954 // dependency.
955 vixl_temps_.Open(GetVIXLAssembler());
956}
957
958void ParallelMoveResolverARM64::FinishEmitNativeCode() {
959 vixl_temps_.Close();
960}
961
962Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
963 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
964 kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
965 kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
966 Location scratch = GetScratchLocation(kind);
967 if (!scratch.Equals(Location::NoLocation())) {
968 return scratch;
969 }
970 // Allocate from VIXL temp registers.
971 if (kind == Location::kRegister) {
972 scratch = LocationFrom(vixl_temps_.AcquireX());
973 } else {
974 DCHECK(kind == Location::kFpuRegister);
975 scratch = LocationFrom(vixl_temps_.AcquireD());
976 }
977 AddScratchLocation(scratch);
978 return scratch;
979}
980
981void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
982 if (loc.IsRegister()) {
983 vixl_temps_.Release(XRegisterFrom(loc));
984 } else {
985 DCHECK(loc.IsFpuRegister());
986 vixl_temps_.Release(DRegisterFrom(loc));
987 }
988 RemoveScratchLocation(loc);
989}
990
Alexandre Rames3e69f162014-12-10 10:36:50 +0000991void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +0100992 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +0100993 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +0000994}
995
Alexandre Rames5319def2014-10-23 10:03:10 +0100996void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +0100997 MacroAssembler* masm = GetVIXLAssembler();
998 BlockPoolsScope block_pools(masm);
Nicolas Geoffray1cf95282014-12-12 19:22:03 +0000999 __ Bind(&frame_entry_label_);
1000
Serban Constantinescu02164b32014-11-13 14:05:07 +00001001 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1002 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001003 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001004 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001005 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001006 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001007 __ Ldr(wzr, MemOperand(temp, 0));
1008 RecordPcInfo(nullptr, 0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001009 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001010
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001011 if (!HasEmptyFrame()) {
1012 int frame_size = GetFrameSize();
1013 // Stack layout:
1014 // sp[frame_size - 8] : lr.
1015 // ... : other preserved core registers.
1016 // ... : other preserved fp registers.
1017 // ... : reserved frame space.
1018 // sp[0] : current method.
1019 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001020 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001021 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1022 frame_size - GetCoreSpillSize());
1023 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1024 frame_size - FrameEntrySpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001025 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001026}
1027
1028void CodeGeneratorARM64::GenerateFrameExit() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001029 BlockPoolsScope block_pools(GetVIXLAssembler());
David Srbeckyc34dc932015-04-12 09:27:43 +01001030 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001031 if (!HasEmptyFrame()) {
1032 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001033 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1034 frame_size - FrameEntrySpillSize());
1035 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1036 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001037 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001038 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001039 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001040 __ Ret();
1041 GetAssembler()->cfi().RestoreState();
1042 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001043}
1044
Zheng Xuda403092015-04-24 17:35:39 +08001045vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
1046 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
1047 return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
1048 core_spill_mask_);
1049}
1050
1051vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
1052 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1053 GetNumberOfFloatingPointRegisters()));
1054 return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
1055 fpu_spill_mask_);
1056}
1057
Alexandre Rames5319def2014-10-23 10:03:10 +01001058void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1059 __ Bind(GetLabelOf(block));
1060}
1061
Calin Juravle175dc732015-08-25 15:42:32 +01001062void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1063 DCHECK(location.IsRegister());
1064 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1065}
1066
Calin Juravlee460d1d2015-09-29 04:52:17 +01001067void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1068 if (location.IsRegister()) {
1069 locations->AddTemp(location);
1070 } else {
1071 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1072 }
1073}
1074
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001075void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001076 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001077 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001078 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Alexandre Rames5319def2014-10-23 10:03:10 +01001079 vixl::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001080 if (value_can_be_null) {
1081 __ Cbz(value, &done);
1082 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001083 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
1084 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001085 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001086 if (value_can_be_null) {
1087 __ Bind(&done);
1088 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001089}
1090
David Brazdil58282f42016-01-14 12:45:10 +00001091void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001092 // Blocked core registers:
1093 // lr : Runtime reserved.
1094 // tr : Runtime reserved.
1095 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1096 // ip1 : VIXL core temp.
1097 // ip0 : VIXL core temp.
1098 //
1099 // Blocked fp registers:
1100 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001101 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1102 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001103 while (!reserved_core_registers.IsEmpty()) {
1104 blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
1105 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001106
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001107 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001108 while (!reserved_fp_registers.IsEmpty()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001109 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
1110 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001111
David Brazdil58282f42016-01-14 12:45:10 +00001112 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001113 // Stubs do not save callee-save floating point registers. If the graph
1114 // is debuggable, we need to deal with these registers differently. For
1115 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001116 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1117 while (!reserved_fp_registers_debuggable.IsEmpty()) {
1118 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().code()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001119 }
1120 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001121}
1122
Alexandre Rames3e69f162014-12-10 10:36:50 +00001123size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1124 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1125 __ Str(reg, MemOperand(sp, stack_index));
1126 return kArm64WordSize;
1127}
1128
1129size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1130 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1131 __ Ldr(reg, MemOperand(sp, stack_index));
1132 return kArm64WordSize;
1133}
1134
1135size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1136 FPRegister reg = FPRegister(reg_id, kDRegSize);
1137 __ Str(reg, MemOperand(sp, stack_index));
1138 return kArm64WordSize;
1139}
1140
1141size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1142 FPRegister reg = FPRegister(reg_id, kDRegSize);
1143 __ Ldr(reg, MemOperand(sp, stack_index));
1144 return kArm64WordSize;
1145}
1146
Alexandre Rames5319def2014-10-23 10:03:10 +01001147void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001148 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001149}
1150
1151void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001152 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001153}
1154
Alexandre Rames67555f72014-11-18 10:55:16 +00001155void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001156 if (constant->IsIntConstant()) {
1157 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1158 } else if (constant->IsLongConstant()) {
1159 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1160 } else if (constant->IsNullConstant()) {
1161 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001162 } else if (constant->IsFloatConstant()) {
1163 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1164 } else {
1165 DCHECK(constant->IsDoubleConstant());
1166 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1167 }
1168}
1169
Alexandre Rames3e69f162014-12-10 10:36:50 +00001170
1171static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1172 DCHECK(constant.IsConstant());
1173 HConstant* cst = constant.GetConstant();
1174 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001175 // Null is mapped to a core W register, which we associate with kPrimInt.
1176 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001177 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1178 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1179 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1180}
1181
Calin Juravlee460d1d2015-09-29 04:52:17 +01001182void CodeGeneratorARM64::MoveLocation(Location destination,
1183 Location source,
1184 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001185 if (source.Equals(destination)) {
1186 return;
1187 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001188
1189 // A valid move can always be inferred from the destination and source
1190 // locations. When moving from and to a register, the argument type can be
1191 // used to generate 32bit instead of 64bit moves. In debug mode we also
1192 // checks the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001193 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001194
1195 if (destination.IsRegister() || destination.IsFpuRegister()) {
1196 if (unspecified_type) {
1197 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1198 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001199 (src_cst != nullptr && (src_cst->IsIntConstant()
1200 || src_cst->IsFloatConstant()
1201 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001202 // For stack slots and 32bit constants, a 64bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001203 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001204 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001205 // If the source is a double stack slot or a 64bit constant, a 64bit
1206 // type is appropriate. Else the source is a register, and since the
1207 // type has not been specified, we chose a 64bit type to force a 64bit
1208 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001209 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001210 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001211 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001212 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1213 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1214 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001215 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1216 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1217 __ Ldr(dst, StackOperandFrom(source));
1218 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001219 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001220 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001221 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001222 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001223 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001224 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001225 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001226 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1227 ? Primitive::kPrimLong
1228 : Primitive::kPrimInt;
1229 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1230 }
1231 } else {
1232 DCHECK(source.IsFpuRegister());
1233 if (destination.IsRegister()) {
1234 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1235 ? Primitive::kPrimDouble
1236 : Primitive::kPrimFloat;
1237 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1238 } else {
1239 DCHECK(destination.IsFpuRegister());
1240 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001241 }
1242 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001243 } else { // The destination is not a register. It must be a stack slot.
1244 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1245 if (source.IsRegister() || source.IsFpuRegister()) {
1246 if (unspecified_type) {
1247 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001248 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001249 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001250 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001251 }
1252 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001253 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1254 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1255 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001256 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001257 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1258 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001259 UseScratchRegisterScope temps(GetVIXLAssembler());
1260 HConstant* src_cst = source.GetConstant();
1261 CPURegister temp;
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001262 if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001263 temp = temps.AcquireW();
1264 } else if (src_cst->IsLongConstant()) {
1265 temp = temps.AcquireX();
1266 } else if (src_cst->IsFloatConstant()) {
1267 temp = temps.AcquireS();
1268 } else {
1269 DCHECK(src_cst->IsDoubleConstant());
1270 temp = temps.AcquireD();
1271 }
1272 MoveConstant(temp, src_cst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001273 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001274 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001275 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001276 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001277 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001278 // There is generally less pressure on FP registers.
1279 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001280 __ Ldr(temp, StackOperandFrom(source));
1281 __ Str(temp, StackOperandFrom(destination));
1282 }
1283 }
1284}
1285
1286void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001287 CPURegister dst,
1288 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001289 switch (type) {
1290 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001291 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001292 break;
1293 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001294 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001295 break;
1296 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001297 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001298 break;
1299 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001300 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001301 break;
1302 case Primitive::kPrimInt:
1303 case Primitive::kPrimNot:
1304 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001305 case Primitive::kPrimFloat:
1306 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001307 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001308 __ Ldr(dst, src);
1309 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001310 case Primitive::kPrimVoid:
1311 LOG(FATAL) << "Unreachable type " << type;
1312 }
1313}
1314
Calin Juravle77520bc2015-01-12 18:45:46 +00001315void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001316 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001317 const MemOperand& src,
1318 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001319 MacroAssembler* masm = GetVIXLAssembler();
1320 BlockPoolsScope block_pools(masm);
1321 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001322 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001323 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001324
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001325 DCHECK(!src.IsPreIndex());
1326 DCHECK(!src.IsPostIndex());
1327
1328 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001329 __ Add(temp_base, src.base(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001330 MemOperand base = MemOperand(temp_base);
1331 switch (type) {
1332 case Primitive::kPrimBoolean:
1333 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001334 if (needs_null_check) {
1335 MaybeRecordImplicitNullCheck(instruction);
1336 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001337 break;
1338 case Primitive::kPrimByte:
1339 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001340 if (needs_null_check) {
1341 MaybeRecordImplicitNullCheck(instruction);
1342 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001343 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1344 break;
1345 case Primitive::kPrimChar:
1346 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001347 if (needs_null_check) {
1348 MaybeRecordImplicitNullCheck(instruction);
1349 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001350 break;
1351 case Primitive::kPrimShort:
1352 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001353 if (needs_null_check) {
1354 MaybeRecordImplicitNullCheck(instruction);
1355 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001356 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1357 break;
1358 case Primitive::kPrimInt:
1359 case Primitive::kPrimNot:
1360 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001361 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001362 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001363 if (needs_null_check) {
1364 MaybeRecordImplicitNullCheck(instruction);
1365 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001366 break;
1367 case Primitive::kPrimFloat:
1368 case Primitive::kPrimDouble: {
1369 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001370 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001371
1372 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1373 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001374 if (needs_null_check) {
1375 MaybeRecordImplicitNullCheck(instruction);
1376 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001377 __ Fmov(FPRegister(dst), temp);
1378 break;
1379 }
1380 case Primitive::kPrimVoid:
1381 LOG(FATAL) << "Unreachable type " << type;
1382 }
1383}
1384
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001385void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001386 CPURegister src,
1387 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001388 switch (type) {
1389 case Primitive::kPrimBoolean:
1390 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001391 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001392 break;
1393 case Primitive::kPrimChar:
1394 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001395 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001396 break;
1397 case Primitive::kPrimInt:
1398 case Primitive::kPrimNot:
1399 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001400 case Primitive::kPrimFloat:
1401 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001402 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001403 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001404 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001405 case Primitive::kPrimVoid:
1406 LOG(FATAL) << "Unreachable type " << type;
1407 }
1408}
1409
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001410void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1411 CPURegister src,
1412 const MemOperand& dst) {
1413 UseScratchRegisterScope temps(GetVIXLAssembler());
1414 Register temp_base = temps.AcquireX();
1415
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001416 DCHECK(!dst.IsPreIndex());
1417 DCHECK(!dst.IsPostIndex());
1418
1419 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001420 Operand op = OperandFromMemOperand(dst);
1421 __ Add(temp_base, dst.base(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001422 MemOperand base = MemOperand(temp_base);
1423 switch (type) {
1424 case Primitive::kPrimBoolean:
1425 case Primitive::kPrimByte:
1426 __ Stlrb(Register(src), base);
1427 break;
1428 case Primitive::kPrimChar:
1429 case Primitive::kPrimShort:
1430 __ Stlrh(Register(src), base);
1431 break;
1432 case Primitive::kPrimInt:
1433 case Primitive::kPrimNot:
1434 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001435 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001436 __ Stlr(Register(src), base);
1437 break;
1438 case Primitive::kPrimFloat:
1439 case Primitive::kPrimDouble: {
1440 DCHECK(src.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001441 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001442
1443 Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1444 __ Fmov(temp, FPRegister(src));
1445 __ Stlr(temp, base);
1446 break;
1447 }
1448 case Primitive::kPrimVoid:
1449 LOG(FATAL) << "Unreachable type " << type;
1450 }
1451}
1452
Calin Juravle175dc732015-08-25 15:42:32 +01001453void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1454 HInstruction* instruction,
1455 uint32_t dex_pc,
1456 SlowPathCode* slow_path) {
1457 InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
1458 instruction,
1459 dex_pc,
1460 slow_path);
1461}
1462
Alexandre Rames67555f72014-11-18 10:55:16 +00001463void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
1464 HInstruction* instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001465 uint32_t dex_pc,
1466 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001467 ValidateInvokeRuntime(instruction, slow_path);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001468 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames67555f72014-11-18 10:55:16 +00001469 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1470 __ Blr(lr);
Roland Levillain896e32d2015-05-05 18:07:10 +01001471 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00001472}
1473
1474void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
1475 vixl::Register class_reg) {
1476 UseScratchRegisterScope temps(GetVIXLAssembler());
1477 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001478 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1479
Serban Constantinescu02164b32014-11-13 14:05:07 +00001480 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001481 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1482 __ Add(temp, class_reg, status_offset);
1483 __ Ldar(temp, HeapOperand(temp));
1484 __ Cmp(temp, mirror::Class::kStatusInitialized);
1485 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001486 __ Bind(slow_path->GetExitLabel());
1487}
Alexandre Rames5319def2014-10-23 10:03:10 +01001488
Roland Levillain44015862016-01-22 11:47:17 +00001489void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001490 BarrierType type = BarrierAll;
1491
1492 switch (kind) {
1493 case MemBarrierKind::kAnyAny:
1494 case MemBarrierKind::kAnyStore: {
1495 type = BarrierAll;
1496 break;
1497 }
1498 case MemBarrierKind::kLoadAny: {
1499 type = BarrierReads;
1500 break;
1501 }
1502 case MemBarrierKind::kStoreStore: {
1503 type = BarrierWrites;
1504 break;
1505 }
1506 default:
1507 LOG(FATAL) << "Unexpected memory barrier " << kind;
1508 }
1509 __ Dmb(InnerShareable, type);
1510}
1511
Serban Constantinescu02164b32014-11-13 14:05:07 +00001512void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1513 HBasicBlock* successor) {
1514 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001515 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1516 if (slow_path == nullptr) {
1517 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1518 instruction->SetSlowPath(slow_path);
1519 codegen_->AddSlowPath(slow_path);
1520 if (successor != nullptr) {
1521 DCHECK(successor->IsLoopHeader());
1522 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1523 }
1524 } else {
1525 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1526 }
1527
Serban Constantinescu02164b32014-11-13 14:05:07 +00001528 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1529 Register temp = temps.AcquireW();
1530
1531 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
1532 if (successor == nullptr) {
1533 __ Cbnz(temp, slow_path->GetEntryLabel());
1534 __ Bind(slow_path->GetReturnLabel());
1535 } else {
1536 __ Cbz(temp, codegen_->GetLabelOf(successor));
1537 __ B(slow_path->GetEntryLabel());
1538 // slow_path will return to GetLabelOf(successor).
1539 }
1540}
1541
Alexandre Rames5319def2014-10-23 10:03:10 +01001542InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1543 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001544 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001545 assembler_(codegen->GetAssembler()),
1546 codegen_(codegen) {}
1547
1548#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001549 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001550
1551#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1552
1553enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001554 // Using a base helps identify when we hit such breakpoints.
1555 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001556#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1557 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1558#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1559};
1560
1561#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001562 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001563 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1564 } \
1565 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1566 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1567 locations->SetOut(Location::Any()); \
1568 }
1569 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1570#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1571
1572#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001573#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001574
Alexandre Rames67555f72014-11-18 10:55:16 +00001575void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001576 DCHECK_EQ(instr->InputCount(), 2U);
1577 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1578 Primitive::Type type = instr->GetResultType();
1579 switch (type) {
1580 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001581 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001582 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001583 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001584 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001585 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001586
1587 case Primitive::kPrimFloat:
1588 case Primitive::kPrimDouble:
1589 locations->SetInAt(0, Location::RequiresFpuRegister());
1590 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001591 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001592 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001593
Alexandre Rames5319def2014-10-23 10:03:10 +01001594 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001595 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001596 }
1597}
1598
Alexandre Rames09a99962015-04-15 11:47:56 +01001599void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001600 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1601
1602 bool object_field_get_with_read_barrier =
1603 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001604 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001605 new (GetGraph()->GetArena()) LocationSummary(instruction,
1606 object_field_get_with_read_barrier ?
1607 LocationSummary::kCallOnSlowPath :
1608 LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01001609 locations->SetInAt(0, Location::RequiresRegister());
1610 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1611 locations->SetOut(Location::RequiresFpuRegister());
1612 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001613 // The output overlaps for an object field get when read barriers
1614 // are enabled: we do not want the load to overwrite the object's
1615 // location, as we need it to emit the read barrier.
1616 locations->SetOut(
1617 Location::RequiresRegister(),
1618 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001619 }
1620}
1621
1622void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1623 const FieldInfo& field_info) {
1624 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001625 LocationSummary* locations = instruction->GetLocations();
1626 Location base_loc = locations->InAt(0);
1627 Location out = locations->Out();
1628 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001629 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001630 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001631 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001632
Roland Levillain44015862016-01-22 11:47:17 +00001633 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1634 // Object FieldGet with Baker's read barrier case.
1635 MacroAssembler* masm = GetVIXLAssembler();
1636 UseScratchRegisterScope temps(masm);
1637 // /* HeapReference<Object> */ out = *(base + offset)
1638 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1639 Register temp = temps.AcquireW();
1640 // Note that potential implicit null checks are handled in this
1641 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1642 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1643 instruction,
1644 out,
1645 base,
1646 offset,
1647 temp,
1648 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001649 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001650 } else {
1651 // General case.
1652 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001653 // Note that a potential implicit null check is handled in this
1654 // CodeGeneratorARM64::LoadAcquire call.
1655 // NB: LoadAcquire will record the pc info if needed.
1656 codegen_->LoadAcquire(
1657 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001658 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001659 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001660 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001661 }
Roland Levillain44015862016-01-22 11:47:17 +00001662 if (field_type == Primitive::kPrimNot) {
1663 // If read barriers are enabled, emit read barriers other than
1664 // Baker's using a slow path (and also unpoison the loaded
1665 // reference, if heap poisoning is enabled).
1666 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1667 }
Roland Levillain4d027112015-07-01 15:41:14 +01001668 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001669}
1670
1671void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1672 LocationSummary* locations =
1673 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1674 locations->SetInAt(0, Location::RequiresRegister());
1675 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
1676 locations->SetInAt(1, Location::RequiresFpuRegister());
1677 } else {
1678 locations->SetInAt(1, Location::RequiresRegister());
1679 }
1680}
1681
1682void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001683 const FieldInfo& field_info,
1684 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001685 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001686 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001687
1688 Register obj = InputRegisterAt(instruction, 0);
1689 CPURegister value = InputCPURegisterAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001690 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001691 Offset offset = field_info.GetFieldOffset();
1692 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001693
Roland Levillain4d027112015-07-01 15:41:14 +01001694 {
1695 // We use a block to end the scratch scope before the write barrier, thus
1696 // freeing the temporary registers so they can be used in `MarkGCCard`.
1697 UseScratchRegisterScope temps(GetVIXLAssembler());
1698
1699 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
1700 DCHECK(value.IsW());
1701 Register temp = temps.AcquireW();
1702 __ Mov(temp, value.W());
1703 GetAssembler()->PoisonHeapReference(temp.W());
1704 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001705 }
Roland Levillain4d027112015-07-01 15:41:14 +01001706
1707 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001708 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
1709 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01001710 } else {
1711 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1712 codegen_->MaybeRecordImplicitNullCheck(instruction);
1713 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001714 }
1715
1716 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001717 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001718 }
1719}
1720
Alexandre Rames67555f72014-11-18 10:55:16 +00001721void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001722 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001723
1724 switch (type) {
1725 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001726 case Primitive::kPrimLong: {
1727 Register dst = OutputRegister(instr);
1728 Register lhs = InputRegisterAt(instr, 0);
1729 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001730 if (instr->IsAdd()) {
1731 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001732 } else if (instr->IsAnd()) {
1733 __ And(dst, lhs, rhs);
1734 } else if (instr->IsOr()) {
1735 __ Orr(dst, lhs, rhs);
1736 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001737 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001738 } else if (instr->IsRor()) {
1739 if (rhs.IsImmediate()) {
1740 uint32_t shift = rhs.immediate() & (lhs.SizeInBits() - 1);
1741 __ Ror(dst, lhs, shift);
1742 } else {
1743 // Ensure shift distance is in the same size register as the result. If
1744 // we are rotating a long and the shift comes in a w register originally,
1745 // we don't need to sxtw for use as an x since the shift distances are
1746 // all & reg_bits - 1.
1747 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1748 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001749 } else {
1750 DCHECK(instr->IsXor());
1751 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001752 }
1753 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001754 }
1755 case Primitive::kPrimFloat:
1756 case Primitive::kPrimDouble: {
1757 FPRegister dst = OutputFPRegister(instr);
1758 FPRegister lhs = InputFPRegisterAt(instr, 0);
1759 FPRegister rhs = InputFPRegisterAt(instr, 1);
1760 if (instr->IsAdd()) {
1761 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001762 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001763 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001764 } else {
1765 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001766 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001767 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001768 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001769 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001770 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001771 }
1772}
1773
Serban Constantinescu02164b32014-11-13 14:05:07 +00001774void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1775 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1776
1777 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1778 Primitive::Type type = instr->GetResultType();
1779 switch (type) {
1780 case Primitive::kPrimInt:
1781 case Primitive::kPrimLong: {
1782 locations->SetInAt(0, Location::RequiresRegister());
1783 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1784 locations->SetOut(Location::RequiresRegister());
1785 break;
1786 }
1787 default:
1788 LOG(FATAL) << "Unexpected shift type " << type;
1789 }
1790}
1791
1792void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1793 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1794
1795 Primitive::Type type = instr->GetType();
1796 switch (type) {
1797 case Primitive::kPrimInt:
1798 case Primitive::kPrimLong: {
1799 Register dst = OutputRegister(instr);
1800 Register lhs = InputRegisterAt(instr, 0);
1801 Operand rhs = InputOperandAt(instr, 1);
1802 if (rhs.IsImmediate()) {
Roland Levillain5b5b9312016-03-22 14:57:31 +00001803 uint32_t shift_value = rhs.immediate() &
1804 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001805 if (instr->IsShl()) {
1806 __ Lsl(dst, lhs, shift_value);
1807 } else if (instr->IsShr()) {
1808 __ Asr(dst, lhs, shift_value);
1809 } else {
1810 __ Lsr(dst, lhs, shift_value);
1811 }
1812 } else {
1813 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1814
1815 if (instr->IsShl()) {
1816 __ Lsl(dst, lhs, rhs_reg);
1817 } else if (instr->IsShr()) {
1818 __ Asr(dst, lhs, rhs_reg);
1819 } else {
1820 __ Lsr(dst, lhs, rhs_reg);
1821 }
1822 }
1823 break;
1824 }
1825 default:
1826 LOG(FATAL) << "Unexpected shift operation type " << type;
1827 }
1828}
1829
Alexandre Rames5319def2014-10-23 10:03:10 +01001830void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001831 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001832}
1833
1834void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001835 HandleBinaryOp(instruction);
1836}
1837
1838void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1839 HandleBinaryOp(instruction);
1840}
1841
1842void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1843 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001844}
1845
Artem Serov7fc63502016-02-09 17:15:29 +00001846void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001847 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
1848 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1849 locations->SetInAt(0, Location::RequiresRegister());
1850 // There is no immediate variant of negated bitwise instructions in AArch64.
1851 locations->SetInAt(1, Location::RequiresRegister());
1852 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1853}
1854
Artem Serov7fc63502016-02-09 17:15:29 +00001855void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001856 Register dst = OutputRegister(instr);
1857 Register lhs = InputRegisterAt(instr, 0);
1858 Register rhs = InputRegisterAt(instr, 1);
1859
1860 switch (instr->GetOpKind()) {
1861 case HInstruction::kAnd:
1862 __ Bic(dst, lhs, rhs);
1863 break;
1864 case HInstruction::kOr:
1865 __ Orn(dst, lhs, rhs);
1866 break;
1867 case HInstruction::kXor:
1868 __ Eon(dst, lhs, rhs);
1869 break;
1870 default:
1871 LOG(FATAL) << "Unreachable";
1872 }
1873}
1874
Alexandre Rames8626b742015-11-25 16:28:08 +00001875void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
1876 HArm64DataProcWithShifterOp* instruction) {
1877 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
1878 instruction->GetType() == Primitive::kPrimLong);
1879 LocationSummary* locations =
1880 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1881 if (instruction->GetInstrKind() == HInstruction::kNeg) {
1882 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
1883 } else {
1884 locations->SetInAt(0, Location::RequiresRegister());
1885 }
1886 locations->SetInAt(1, Location::RequiresRegister());
1887 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1888}
1889
1890void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
1891 HArm64DataProcWithShifterOp* instruction) {
1892 Primitive::Type type = instruction->GetType();
1893 HInstruction::InstructionKind kind = instruction->GetInstrKind();
1894 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1895 Register out = OutputRegister(instruction);
1896 Register left;
1897 if (kind != HInstruction::kNeg) {
1898 left = InputRegisterAt(instruction, 0);
1899 }
1900 // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
1901 // shifter operand operation, the IR generating `right_reg` (input to the type
1902 // conversion) can have a different type from the current instruction's type,
1903 // so we manually indicate the type.
1904 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Roland Levillain5b5b9312016-03-22 14:57:31 +00001905 int64_t shift_amount = instruction->GetShiftAmount() &
1906 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexandre Rames8626b742015-11-25 16:28:08 +00001907
1908 Operand right_operand(0);
1909
1910 HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
1911 if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
1912 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
1913 } else {
1914 right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
1915 }
1916
1917 // Logical binary operations do not support extension operations in the
1918 // operand. Note that VIXL would still manage if it was passed by generating
1919 // the extension as a separate instruction.
1920 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
1921 DCHECK(!right_operand.IsExtendedRegister() ||
1922 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
1923 kind != HInstruction::kNeg));
1924 switch (kind) {
1925 case HInstruction::kAdd:
1926 __ Add(out, left, right_operand);
1927 break;
1928 case HInstruction::kAnd:
1929 __ And(out, left, right_operand);
1930 break;
1931 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00001932 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00001933 __ Neg(out, right_operand);
1934 break;
1935 case HInstruction::kOr:
1936 __ Orr(out, left, right_operand);
1937 break;
1938 case HInstruction::kSub:
1939 __ Sub(out, left, right_operand);
1940 break;
1941 case HInstruction::kXor:
1942 __ Eor(out, left, right_operand);
1943 break;
1944 default:
1945 LOG(FATAL) << "Unexpected operation kind: " << kind;
1946 UNREACHABLE();
1947 }
1948}
1949
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001950void LocationsBuilderARM64::VisitArm64IntermediateAddress(HArm64IntermediateAddress* instruction) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001951 // The read barrier instrumentation does not support the
1952 // HArm64IntermediateAddress instruction yet.
1953 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001954 LocationSummary* locations =
1955 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1956 locations->SetInAt(0, Location::RequiresRegister());
1957 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
1958 locations->SetOut(Location::RequiresRegister());
1959}
1960
1961void InstructionCodeGeneratorARM64::VisitArm64IntermediateAddress(
1962 HArm64IntermediateAddress* instruction) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001963 // The read barrier instrumentation does not support the
1964 // HArm64IntermediateAddress instruction yet.
1965 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001966 __ Add(OutputRegister(instruction),
1967 InputRegisterAt(instruction, 0),
1968 Operand(InputOperandAt(instruction, 1)));
1969}
1970
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001971void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00001972 LocationSummary* locations =
1973 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001974 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
1975 if (instr->GetOpKind() == HInstruction::kSub &&
1976 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00001977 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001978 // Don't allocate register for Mneg instruction.
1979 } else {
1980 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
1981 Location::RequiresRegister());
1982 }
1983 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
1984 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00001985 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1986}
1987
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001988void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00001989 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001990 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
1991 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00001992
1993 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
1994 // This fixup should be carried out for all multiply-accumulate instructions:
1995 // madd, msub, smaddl, smsubl, umaddl and umsubl.
1996 if (instr->GetType() == Primitive::kPrimLong &&
1997 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
1998 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
1999 vixl::Instruction* prev = masm->GetCursorAddress<vixl::Instruction*>() - vixl::kInstructionSize;
2000 if (prev->IsLoadOrStore()) {
2001 // Make sure we emit only exactly one nop.
2002 vixl::CodeBufferCheckScope scope(masm,
2003 vixl::kInstructionSize,
2004 vixl::CodeBufferCheckScope::kCheck,
2005 vixl::CodeBufferCheckScope::kExactSize);
2006 __ nop();
2007 }
2008 }
2009
2010 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002011 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002012 __ Madd(res, mul_left, mul_right, accumulator);
2013 } else {
2014 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002015 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002016 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002017 __ Mneg(res, mul_left, mul_right);
2018 } else {
2019 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2020 __ Msub(res, mul_left, mul_right, accumulator);
2021 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002022 }
2023}
2024
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002025void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002026 bool object_array_get_with_read_barrier =
2027 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002028 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002029 new (GetGraph()->GetArena()) LocationSummary(instruction,
2030 object_array_get_with_read_barrier ?
2031 LocationSummary::kCallOnSlowPath :
2032 LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002033 locations->SetInAt(0, Location::RequiresRegister());
2034 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002035 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2036 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2037 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002038 // The output overlaps in the case of an object array get with
2039 // read barriers enabled: we do not want the move to overwrite the
2040 // array's location, as we need it to emit the read barrier.
2041 locations->SetOut(
2042 Location::RequiresRegister(),
2043 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002044 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002045}
2046
2047void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002048 Primitive::Type type = instruction->GetType();
2049 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002050 LocationSummary* locations = instruction->GetLocations();
2051 Location index = locations->InAt(1);
2052 uint32_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Roland Levillain44015862016-01-22 11:47:17 +00002053 Location out = locations->Out();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002054
Alexandre Ramesd921d642015-04-16 15:07:16 +01002055 MacroAssembler* masm = GetVIXLAssembler();
2056 UseScratchRegisterScope temps(masm);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002057 // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002058 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002059
Roland Levillain44015862016-01-22 11:47:17 +00002060 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2061 // Object ArrayGet with Baker's read barrier case.
2062 Register temp = temps.AcquireW();
2063 // The read barrier instrumentation does not support the
2064 // HArm64IntermediateAddress instruction yet.
2065 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
2066 // Note that a potential implicit null check is handled in the
2067 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2068 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2069 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002070 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002071 // General case.
2072 MemOperand source = HeapOperand(obj);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002073 if (index.IsConstant()) {
Roland Levillain44015862016-01-22 11:47:17 +00002074 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2075 source = HeapOperand(obj, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002076 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002077 Register temp = temps.AcquireSameSizeAs(obj);
2078 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
2079 // The read barrier instrumentation does not support the
2080 // HArm64IntermediateAddress instruction yet.
2081 DCHECK(!kEmitCompilerReadBarrier);
2082 // We do not need to compute the intermediate address from the array: the
2083 // input instruction has done it already. See the comment in
2084 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2085 if (kIsDebugBuild) {
2086 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
2087 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2088 }
2089 temp = obj;
2090 } else {
2091 __ Add(temp, obj, offset);
2092 }
2093 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2094 }
2095
2096 codegen_->Load(type, OutputCPURegister(instruction), source);
2097 codegen_->MaybeRecordImplicitNullCheck(instruction);
2098
2099 if (type == Primitive::kPrimNot) {
2100 static_assert(
2101 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2102 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2103 Location obj_loc = locations->InAt(0);
2104 if (index.IsConstant()) {
2105 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2106 } else {
2107 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2108 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002109 }
Roland Levillain4d027112015-07-01 15:41:14 +01002110 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002111}
2112
Alexandre Rames5319def2014-10-23 10:03:10 +01002113void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2114 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2115 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002116 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002117}
2118
2119void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01002120 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01002121 __ Ldr(OutputRegister(instruction),
2122 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
Calin Juravle77520bc2015-01-12 18:45:46 +00002123 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002124}
2125
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002126void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002127 Primitive::Type value_type = instruction->GetComponentType();
2128
2129 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2130 bool object_array_set_with_read_barrier =
2131 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002132 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2133 instruction,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002134 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
2135 LocationSummary::kCallOnSlowPath :
2136 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002137 locations->SetInAt(0, Location::RequiresRegister());
2138 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002139 if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002140 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002141 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002142 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002143 }
2144}
2145
2146void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2147 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002148 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002149 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002150 bool needs_write_barrier =
2151 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002152
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002153 Register array = InputRegisterAt(instruction, 0);
2154 CPURegister value = InputCPURegisterAt(instruction, 2);
2155 CPURegister source = value;
2156 Location index = locations->InAt(1);
2157 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2158 MemOperand destination = HeapOperand(array);
2159 MacroAssembler* masm = GetVIXLAssembler();
2160 BlockPoolsScope block_pools(masm);
2161
2162 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002163 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002164 if (index.IsConstant()) {
2165 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2166 destination = HeapOperand(array, offset);
2167 } else {
2168 UseScratchRegisterScope temps(masm);
2169 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002170 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00002171 // The read barrier instrumentation does not support the
2172 // HArm64IntermediateAddress instruction yet.
2173 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002174 // We do not need to compute the intermediate address from the array: the
2175 // input instruction has done it already. See the comment in
2176 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2177 if (kIsDebugBuild) {
2178 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
2179 DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
2180 }
2181 temp = array;
2182 } else {
2183 __ Add(temp, array, offset);
2184 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002185 destination = HeapOperand(temp,
2186 XRegisterFrom(index),
2187 LSL,
2188 Primitive::ComponentSizeShift(value_type));
2189 }
2190 codegen_->Store(value_type, value, destination);
2191 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002192 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002193 DCHECK(needs_write_barrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002194 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002195 vixl::Label done;
2196 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002197 {
2198 // We use a block to end the scratch scope before the write barrier, thus
2199 // freeing the temporary registers so they can be used in `MarkGCCard`.
2200 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002201 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002202 if (index.IsConstant()) {
2203 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002204 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002205 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002206 destination = HeapOperand(temp,
2207 XRegisterFrom(index),
2208 LSL,
2209 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002210 }
2211
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002212 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2213 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2214 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2215
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002216 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002217 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2218 codegen_->AddSlowPath(slow_path);
2219 if (instruction->GetValueCanBeNull()) {
2220 vixl::Label non_zero;
2221 __ Cbnz(Register(value), &non_zero);
2222 if (!index.IsConstant()) {
2223 __ Add(temp, array, offset);
2224 }
2225 __ Str(wzr, destination);
2226 codegen_->MaybeRecordImplicitNullCheck(instruction);
2227 __ B(&done);
2228 __ Bind(&non_zero);
2229 }
2230
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002231 if (kEmitCompilerReadBarrier) {
2232 // When read barriers are enabled, the type checking
2233 // instrumentation requires two read barriers:
2234 //
2235 // __ Mov(temp2, temp);
2236 // // /* HeapReference<Class> */ temp = temp->component_type_
2237 // __ Ldr(temp, HeapOperand(temp, component_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002238 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002239 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
2240 //
2241 // // /* HeapReference<Class> */ temp2 = value->klass_
2242 // __ Ldr(temp2, HeapOperand(Register(value), class_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002243 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002244 // instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
2245 //
2246 // __ Cmp(temp, temp2);
2247 //
2248 // However, the second read barrier may trash `temp`, as it
2249 // is a temporary register, and as such would not be saved
2250 // along with live registers before calling the runtime (nor
2251 // restored afterwards). So in this case, we bail out and
2252 // delegate the work to the array set slow path.
2253 //
2254 // TODO: Extend the register allocator to support a new
2255 // "(locally) live temp" location so as to avoid always
2256 // going into the slow path when read barriers are enabled.
2257 __ B(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002258 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002259 Register temp2 = temps.AcquireSameSizeAs(array);
2260 // /* HeapReference<Class> */ temp = array->klass_
2261 __ Ldr(temp, HeapOperand(array, class_offset));
2262 codegen_->MaybeRecordImplicitNullCheck(instruction);
2263 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2264
2265 // /* HeapReference<Class> */ temp = temp->component_type_
2266 __ Ldr(temp, HeapOperand(temp, component_offset));
2267 // /* HeapReference<Class> */ temp2 = value->klass_
2268 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2269 // If heap poisoning is enabled, no need to unpoison `temp`
2270 // nor `temp2`, as we are comparing two poisoned references.
2271 __ Cmp(temp, temp2);
2272
2273 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2274 vixl::Label do_put;
2275 __ B(eq, &do_put);
2276 // If heap poisoning is enabled, the `temp` reference has
2277 // not been unpoisoned yet; unpoison it now.
2278 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2279
2280 // /* HeapReference<Class> */ temp = temp->super_class_
2281 __ Ldr(temp, HeapOperand(temp, super_offset));
2282 // If heap poisoning is enabled, no need to unpoison
2283 // `temp`, as we are comparing against null below.
2284 __ Cbnz(temp, slow_path->GetEntryLabel());
2285 __ Bind(&do_put);
2286 } else {
2287 __ B(ne, slow_path->GetEntryLabel());
2288 }
2289 temps.Release(temp2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002290 }
2291 }
2292
2293 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002294 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002295 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002296 __ Mov(temp2, value.W());
2297 GetAssembler()->PoisonHeapReference(temp2);
2298 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002299 }
2300
2301 if (!index.IsConstant()) {
2302 __ Add(temp, array, offset);
2303 }
Nicolas Geoffray61b1dbe2015-10-01 10:27:52 +01002304 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002305
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002306 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002307 codegen_->MaybeRecordImplicitNullCheck(instruction);
2308 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002309 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002310
2311 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2312
2313 if (done.IsLinked()) {
2314 __ Bind(&done);
2315 }
2316
2317 if (slow_path != nullptr) {
2318 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002319 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002320 }
2321}
2322
Alexandre Rames67555f72014-11-18 10:55:16 +00002323void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002324 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2325 ? LocationSummary::kCallOnSlowPath
2326 : LocationSummary::kNoCall;
2327 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002328 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002329 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002330 if (instruction->HasUses()) {
2331 locations->SetOut(Location::SameAsFirstInput());
2332 }
2333}
2334
2335void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002336 BoundsCheckSlowPathARM64* slow_path =
2337 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002338 codegen_->AddSlowPath(slow_path);
2339
2340 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2341 __ B(slow_path->GetEntryLabel(), hs);
2342}
2343
Alexandre Rames67555f72014-11-18 10:55:16 +00002344void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2345 LocationSummary* locations =
2346 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2347 locations->SetInAt(0, Location::RequiresRegister());
2348 if (check->HasUses()) {
2349 locations->SetOut(Location::SameAsFirstInput());
2350 }
2351}
2352
2353void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2354 // We assume the class is not null.
2355 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2356 check->GetLoadClass(), check, check->GetDexPc(), true);
2357 codegen_->AddSlowPath(slow_path);
2358 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2359}
2360
Roland Levillain1a653882016-03-18 18:05:57 +00002361static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2362 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2363 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2364}
2365
2366void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2367 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2368 Location rhs_loc = instruction->GetLocations()->InAt(1);
2369 if (rhs_loc.IsConstant()) {
2370 // 0.0 is the only immediate that can be encoded directly in
2371 // an FCMP instruction.
2372 //
2373 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2374 // specify that in a floating-point comparison, positive zero
2375 // and negative zero are considered equal, so we can use the
2376 // literal 0.0 for both cases here.
2377 //
2378 // Note however that some methods (Float.equal, Float.compare,
2379 // Float.compareTo, Double.equal, Double.compare,
2380 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2381 // StrictMath.min) consider 0.0 to be (strictly) greater than
2382 // -0.0. So if we ever translate calls to these methods into a
2383 // HCompare instruction, we must handle the -0.0 case with
2384 // care here.
2385 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2386 __ Fcmp(lhs_reg, 0.0);
2387 } else {
2388 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2389 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002390}
2391
Serban Constantinescu02164b32014-11-13 14:05:07 +00002392void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002393 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002394 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2395 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002396 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002397 case Primitive::kPrimBoolean:
2398 case Primitive::kPrimByte:
2399 case Primitive::kPrimShort:
2400 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002401 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002402 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002403 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002404 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002405 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2406 break;
2407 }
2408 case Primitive::kPrimFloat:
2409 case Primitive::kPrimDouble: {
2410 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002411 locations->SetInAt(1,
2412 IsFloatingPointZeroConstant(compare->InputAt(1))
2413 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2414 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002415 locations->SetOut(Location::RequiresRegister());
2416 break;
2417 }
2418 default:
2419 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2420 }
2421}
2422
2423void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2424 Primitive::Type in_type = compare->InputAt(0)->GetType();
2425
2426 // 0 if: left == right
2427 // 1 if: left > right
2428 // -1 if: left < right
2429 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002430 case Primitive::kPrimBoolean:
2431 case Primitive::kPrimByte:
2432 case Primitive::kPrimShort:
2433 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002434 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002435 case Primitive::kPrimLong: {
2436 Register result = OutputRegister(compare);
2437 Register left = InputRegisterAt(compare, 0);
2438 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002439 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002440 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2441 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00002442 break;
2443 }
2444 case Primitive::kPrimFloat:
2445 case Primitive::kPrimDouble: {
2446 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002447 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002448 __ Cset(result, ne);
2449 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002450 break;
2451 }
2452 default:
2453 LOG(FATAL) << "Unimplemented compare type " << in_type;
2454 }
2455}
2456
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002457void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002458 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002459
2460 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2461 locations->SetInAt(0, Location::RequiresFpuRegister());
2462 locations->SetInAt(1,
2463 IsFloatingPointZeroConstant(instruction->InputAt(1))
2464 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2465 : Location::RequiresFpuRegister());
2466 } else {
2467 // Integer cases.
2468 locations->SetInAt(0, Location::RequiresRegister());
2469 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2470 }
2471
David Brazdilb3e773e2016-01-26 11:28:37 +00002472 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002473 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002474 }
2475}
2476
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002477void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002478 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002479 return;
2480 }
2481
2482 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002483 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002484 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002485
Roland Levillain7f63c522015-07-13 15:54:55 +00002486 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002487 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002488 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002489 } else {
2490 // Integer cases.
2491 Register lhs = InputRegisterAt(instruction, 0);
2492 Operand rhs = InputOperandAt(instruction, 1);
2493 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002494 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002495 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002496}
2497
2498#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2499 M(Equal) \
2500 M(NotEqual) \
2501 M(LessThan) \
2502 M(LessThanOrEqual) \
2503 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002504 M(GreaterThanOrEqual) \
2505 M(Below) \
2506 M(BelowOrEqual) \
2507 M(Above) \
2508 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002509#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002510void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2511void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002512FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002513#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002514#undef FOR_EACH_CONDITION_INSTRUCTION
2515
Zheng Xuc6667102015-05-15 16:08:45 +08002516void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2517 DCHECK(instruction->IsDiv() || instruction->IsRem());
2518
2519 LocationSummary* locations = instruction->GetLocations();
2520 Location second = locations->InAt(1);
2521 DCHECK(second.IsConstant());
2522
2523 Register out = OutputRegister(instruction);
2524 Register dividend = InputRegisterAt(instruction, 0);
2525 int64_t imm = Int64FromConstant(second.GetConstant());
2526 DCHECK(imm == 1 || imm == -1);
2527
2528 if (instruction->IsRem()) {
2529 __ Mov(out, 0);
2530 } else {
2531 if (imm == 1) {
2532 __ Mov(out, dividend);
2533 } else {
2534 __ Neg(out, dividend);
2535 }
2536 }
2537}
2538
2539void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2540 DCHECK(instruction->IsDiv() || instruction->IsRem());
2541
2542 LocationSummary* locations = instruction->GetLocations();
2543 Location second = locations->InAt(1);
2544 DCHECK(second.IsConstant());
2545
2546 Register out = OutputRegister(instruction);
2547 Register dividend = InputRegisterAt(instruction, 0);
2548 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002549 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002550 int ctz_imm = CTZ(abs_imm);
2551
2552 UseScratchRegisterScope temps(GetVIXLAssembler());
2553 Register temp = temps.AcquireSameSizeAs(out);
2554
2555 if (instruction->IsDiv()) {
2556 __ Add(temp, dividend, abs_imm - 1);
2557 __ Cmp(dividend, 0);
2558 __ Csel(out, temp, dividend, lt);
2559 if (imm > 0) {
2560 __ Asr(out, out, ctz_imm);
2561 } else {
2562 __ Neg(out, Operand(out, ASR, ctz_imm));
2563 }
2564 } else {
2565 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
2566 __ Asr(temp, dividend, bits - 1);
2567 __ Lsr(temp, temp, bits - ctz_imm);
2568 __ Add(out, dividend, temp);
2569 __ And(out, out, abs_imm - 1);
2570 __ Sub(out, out, temp);
2571 }
2572}
2573
2574void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2575 DCHECK(instruction->IsDiv() || instruction->IsRem());
2576
2577 LocationSummary* locations = instruction->GetLocations();
2578 Location second = locations->InAt(1);
2579 DCHECK(second.IsConstant());
2580
2581 Register out = OutputRegister(instruction);
2582 Register dividend = InputRegisterAt(instruction, 0);
2583 int64_t imm = Int64FromConstant(second.GetConstant());
2584
2585 Primitive::Type type = instruction->GetResultType();
2586 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2587
2588 int64_t magic;
2589 int shift;
2590 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
2591
2592 UseScratchRegisterScope temps(GetVIXLAssembler());
2593 Register temp = temps.AcquireSameSizeAs(out);
2594
2595 // temp = get_high(dividend * magic)
2596 __ Mov(temp, magic);
2597 if (type == Primitive::kPrimLong) {
2598 __ Smulh(temp, dividend, temp);
2599 } else {
2600 __ Smull(temp.X(), dividend, temp);
2601 __ Lsr(temp.X(), temp.X(), 32);
2602 }
2603
2604 if (imm > 0 && magic < 0) {
2605 __ Add(temp, temp, dividend);
2606 } else if (imm < 0 && magic > 0) {
2607 __ Sub(temp, temp, dividend);
2608 }
2609
2610 if (shift != 0) {
2611 __ Asr(temp, temp, shift);
2612 }
2613
2614 if (instruction->IsDiv()) {
2615 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2616 } else {
2617 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2618 // TODO: Strength reduction for msub.
2619 Register temp_imm = temps.AcquireSameSizeAs(out);
2620 __ Mov(temp_imm, imm);
2621 __ Msub(out, temp, temp_imm, dividend);
2622 }
2623}
2624
2625void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2626 DCHECK(instruction->IsDiv() || instruction->IsRem());
2627 Primitive::Type type = instruction->GetResultType();
2628 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
2629
2630 LocationSummary* locations = instruction->GetLocations();
2631 Register out = OutputRegister(instruction);
2632 Location second = locations->InAt(1);
2633
2634 if (second.IsConstant()) {
2635 int64_t imm = Int64FromConstant(second.GetConstant());
2636
2637 if (imm == 0) {
2638 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
2639 } else if (imm == 1 || imm == -1) {
2640 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002641 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08002642 DivRemByPowerOfTwo(instruction);
2643 } else {
2644 DCHECK(imm <= -2 || imm >= 2);
2645 GenerateDivRemWithAnyConstant(instruction);
2646 }
2647 } else {
2648 Register dividend = InputRegisterAt(instruction, 0);
2649 Register divisor = InputRegisterAt(instruction, 1);
2650 if (instruction->IsDiv()) {
2651 __ Sdiv(out, dividend, divisor);
2652 } else {
2653 UseScratchRegisterScope temps(GetVIXLAssembler());
2654 Register temp = temps.AcquireSameSizeAs(out);
2655 __ Sdiv(temp, dividend, divisor);
2656 __ Msub(out, temp, divisor, dividend);
2657 }
2658 }
2659}
2660
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002661void LocationsBuilderARM64::VisitDiv(HDiv* div) {
2662 LocationSummary* locations =
2663 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2664 switch (div->GetResultType()) {
2665 case Primitive::kPrimInt:
2666 case Primitive::kPrimLong:
2667 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002668 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002669 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2670 break;
2671
2672 case Primitive::kPrimFloat:
2673 case Primitive::kPrimDouble:
2674 locations->SetInAt(0, Location::RequiresFpuRegister());
2675 locations->SetInAt(1, Location::RequiresFpuRegister());
2676 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2677 break;
2678
2679 default:
2680 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2681 }
2682}
2683
2684void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
2685 Primitive::Type type = div->GetResultType();
2686 switch (type) {
2687 case Primitive::kPrimInt:
2688 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08002689 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002690 break;
2691
2692 case Primitive::kPrimFloat:
2693 case Primitive::kPrimDouble:
2694 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
2695 break;
2696
2697 default:
2698 LOG(FATAL) << "Unexpected div type " << type;
2699 }
2700}
2701
Alexandre Rames67555f72014-11-18 10:55:16 +00002702void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002703 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2704 ? LocationSummary::kCallOnSlowPath
2705 : LocationSummary::kNoCall;
2706 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002707 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2708 if (instruction->HasUses()) {
2709 locations->SetOut(Location::SameAsFirstInput());
2710 }
2711}
2712
2713void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2714 SlowPathCodeARM64* slow_path =
2715 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
2716 codegen_->AddSlowPath(slow_path);
2717 Location value = instruction->GetLocations()->InAt(0);
2718
Alexandre Rames3e69f162014-12-10 10:36:50 +00002719 Primitive::Type type = instruction->GetType();
2720
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002721 if (!Primitive::IsIntegralType(type)) {
2722 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00002723 return;
2724 }
2725
Alexandre Rames67555f72014-11-18 10:55:16 +00002726 if (value.IsConstant()) {
2727 int64_t divisor = Int64ConstantFrom(value);
2728 if (divisor == 0) {
2729 __ B(slow_path->GetEntryLabel());
2730 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00002731 // A division by a non-null constant is valid. We don't need to perform
2732 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00002733 }
2734 } else {
2735 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2736 }
2737}
2738
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002739void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2740 LocationSummary* locations =
2741 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2742 locations->SetOut(Location::ConstantLocation(constant));
2743}
2744
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002745void InstructionCodeGeneratorARM64::VisitDoubleConstant(
2746 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002747 // Will be generated at use site.
2748}
2749
Alexandre Rames5319def2014-10-23 10:03:10 +01002750void LocationsBuilderARM64::VisitExit(HExit* exit) {
2751 exit->SetLocations(nullptr);
2752}
2753
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002754void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002755}
2756
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002757void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
2758 LocationSummary* locations =
2759 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2760 locations->SetOut(Location::ConstantLocation(constant));
2761}
2762
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002763void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002764 // Will be generated at use site.
2765}
2766
David Brazdilfc6a86a2015-06-26 10:33:45 +00002767void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002768 DCHECK(!successor->IsExitBlock());
2769 HBasicBlock* block = got->GetBlock();
2770 HInstruction* previous = got->GetPrevious();
2771 HLoopInformation* info = block->GetLoopInformation();
2772
David Brazdil46e2a392015-03-16 17:31:52 +00002773 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002774 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2775 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2776 return;
2777 }
2778 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2779 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2780 }
2781 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002782 __ B(codegen_->GetLabelOf(successor));
2783 }
2784}
2785
David Brazdilfc6a86a2015-06-26 10:33:45 +00002786void LocationsBuilderARM64::VisitGoto(HGoto* got) {
2787 got->SetLocations(nullptr);
2788}
2789
2790void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
2791 HandleGoto(got, got->GetSuccessor());
2792}
2793
2794void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2795 try_boundary->SetLocations(nullptr);
2796}
2797
2798void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2799 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2800 if (!successor->IsExitBlock()) {
2801 HandleGoto(try_boundary, successor);
2802 }
2803}
2804
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002805void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00002806 size_t condition_input_index,
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002807 vixl::Label* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00002808 vixl::Label* false_target) {
2809 // FP branching requires both targets to be explicit. If either of the targets
2810 // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
2811 vixl::Label fallthrough_target;
2812 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002813
David Brazdil0debae72015-11-12 18:37:00 +00002814 if (true_target == nullptr && false_target == nullptr) {
2815 // Nothing to do. The code always falls through.
2816 return;
2817 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00002818 // Constant condition, statically compared against "true" (integer value 1).
2819 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00002820 if (true_target != nullptr) {
2821 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002822 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002823 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00002824 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00002825 if (false_target != nullptr) {
2826 __ B(false_target);
2827 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002828 }
David Brazdil0debae72015-11-12 18:37:00 +00002829 return;
2830 }
2831
2832 // The following code generates these patterns:
2833 // (1) true_target == nullptr && false_target != nullptr
2834 // - opposite condition true => branch to false_target
2835 // (2) true_target != nullptr && false_target == nullptr
2836 // - condition true => branch to true_target
2837 // (3) true_target != nullptr && false_target != nullptr
2838 // - condition true => branch to true_target
2839 // - branch to false_target
2840 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002841 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00002842 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002843 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00002844 if (true_target == nullptr) {
2845 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
2846 } else {
2847 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
2848 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002849 } else {
2850 // The condition instruction has not been materialized, use its inputs as
2851 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00002852 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00002853
David Brazdil0debae72015-11-12 18:37:00 +00002854 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00002855 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00002856 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00002857 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002858 IfCondition opposite_condition = condition->GetOppositeCondition();
2859 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00002860 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002861 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00002862 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002863 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00002864 // Integer cases.
2865 Register lhs = InputRegisterAt(condition, 0);
2866 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00002867
2868 Condition arm64_cond;
2869 vixl::Label* non_fallthrough_target;
2870 if (true_target == nullptr) {
2871 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
2872 non_fallthrough_target = false_target;
2873 } else {
2874 arm64_cond = ARM64Condition(condition->GetCondition());
2875 non_fallthrough_target = true_target;
2876 }
2877
Aart Bik086d27e2016-01-20 17:02:00 -08002878 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
2879 rhs.IsImmediate() && (rhs.immediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00002880 switch (arm64_cond) {
2881 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00002882 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002883 break;
2884 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00002885 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002886 break;
2887 case lt:
2888 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002889 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002890 break;
2891 case ge:
2892 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002893 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002894 break;
2895 default:
2896 // Without the `static_cast` the compiler throws an error for
2897 // `-Werror=sign-promo`.
2898 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
2899 }
2900 } else {
2901 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00002902 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002903 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002904 }
2905 }
David Brazdil0debae72015-11-12 18:37:00 +00002906
2907 // If neither branch falls through (case 3), the conditional branch to `true_target`
2908 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2909 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002910 __ B(false_target);
2911 }
David Brazdil0debae72015-11-12 18:37:00 +00002912
2913 if (fallthrough_target.IsLinked()) {
2914 __ Bind(&fallthrough_target);
2915 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002916}
2917
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002918void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
2919 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00002920 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002921 locations->SetInAt(0, Location::RequiresRegister());
2922 }
2923}
2924
2925void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002926 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2927 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
2928 vixl::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
2929 nullptr : codegen_->GetLabelOf(true_successor);
2930 vixl::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
2931 nullptr : codegen_->GetLabelOf(false_successor);
2932 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002933}
2934
2935void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2936 LocationSummary* locations = new (GetGraph()->GetArena())
2937 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00002938 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002939 locations->SetInAt(0, Location::RequiresRegister());
2940 }
2941}
2942
2943void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08002944 SlowPathCodeARM64* slow_path =
2945 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00002946 GenerateTestAndBranch(deoptimize,
2947 /* condition_input_index */ 0,
2948 slow_path->GetEntryLabel(),
2949 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002950}
2951
David Brazdilc0b601b2016-02-08 14:20:45 +00002952enum SelectVariant {
2953 kCsel,
2954 kCselFalseConst,
2955 kCselTrueConst,
2956 kFcsel,
2957};
2958
2959static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
2960 return condition->IsCondition() &&
2961 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
2962}
2963
2964static inline bool IsRecognizedCselConstant(HInstruction* constant) {
2965 if (constant->IsConstant()) {
2966 int64_t value = Int64FromConstant(constant->AsConstant());
2967 if ((value == -1) || (value == 0) || (value == 1)) {
2968 return true;
2969 }
2970 }
2971 return false;
2972}
2973
2974static inline SelectVariant GetSelectVariant(HSelect* select) {
2975 if (Primitive::IsFloatingPointType(select->GetType())) {
2976 return kFcsel;
2977 } else if (IsRecognizedCselConstant(select->GetFalseValue())) {
2978 return kCselFalseConst;
2979 } else if (IsRecognizedCselConstant(select->GetTrueValue())) {
2980 return kCselTrueConst;
2981 } else {
2982 return kCsel;
2983 }
2984}
2985
2986static inline bool HasSwappedInputs(SelectVariant variant) {
2987 return variant == kCselTrueConst;
2988}
2989
2990static inline Condition GetConditionForSelect(HCondition* condition, SelectVariant variant) {
2991 IfCondition cond = HasSwappedInputs(variant) ? condition->GetOppositeCondition()
2992 : condition->GetCondition();
2993 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
2994 : ARM64Condition(cond);
2995}
2996
David Brazdil74eb1b22015-12-14 11:44:01 +00002997void LocationsBuilderARM64::VisitSelect(HSelect* select) {
2998 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
David Brazdilc0b601b2016-02-08 14:20:45 +00002999 switch (GetSelectVariant(select)) {
3000 case kCsel:
3001 locations->SetInAt(0, Location::RequiresRegister());
3002 locations->SetInAt(1, Location::RequiresRegister());
3003 locations->SetOut(Location::RequiresRegister());
3004 break;
3005 case kCselFalseConst:
3006 locations->SetInAt(0, Location::ConstantLocation(select->InputAt(0)->AsConstant()));
3007 locations->SetInAt(1, Location::RequiresRegister());
3008 locations->SetOut(Location::RequiresRegister());
3009 break;
3010 case kCselTrueConst:
3011 locations->SetInAt(0, Location::RequiresRegister());
3012 locations->SetInAt(1, Location::ConstantLocation(select->InputAt(1)->AsConstant()));
3013 locations->SetOut(Location::RequiresRegister());
3014 break;
3015 case kFcsel:
3016 locations->SetInAt(0, Location::RequiresFpuRegister());
3017 locations->SetInAt(1, Location::RequiresFpuRegister());
3018 locations->SetOut(Location::RequiresFpuRegister());
3019 break;
David Brazdil74eb1b22015-12-14 11:44:01 +00003020 }
3021 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3022 locations->SetInAt(2, Location::RequiresRegister());
3023 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003024}
3025
3026void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003027 HInstruction* cond = select->GetCondition();
3028 SelectVariant variant = GetSelectVariant(select);
3029 Condition csel_cond;
3030
3031 if (IsBooleanValueOrMaterializedCondition(cond)) {
3032 if (cond->IsCondition() && cond->GetNext() == select) {
3033 // Condition codes set from previous instruction.
3034 csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
3035 } else {
3036 __ Cmp(InputRegisterAt(select, 2), 0);
3037 csel_cond = HasSwappedInputs(variant) ? eq : ne;
3038 }
3039 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003040 GenerateFcmp(cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003041 csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
3042 } else {
3043 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
3044 csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
3045 }
3046
3047 switch (variant) {
3048 case kCsel:
3049 case kCselFalseConst:
3050 __ Csel(OutputRegister(select),
3051 InputRegisterAt(select, 1),
3052 InputOperandAt(select, 0),
3053 csel_cond);
3054 break;
3055 case kCselTrueConst:
3056 __ Csel(OutputRegister(select),
3057 InputRegisterAt(select, 0),
3058 InputOperandAt(select, 1),
3059 csel_cond);
3060 break;
3061 case kFcsel:
3062 __ Fcsel(OutputFPRegister(select),
3063 InputFPRegisterAt(select, 1),
3064 InputFPRegisterAt(select, 0),
3065 csel_cond);
3066 break;
3067 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003068}
3069
David Srbecky0cf44932015-12-09 14:09:59 +00003070void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3071 new (GetGraph()->GetArena()) LocationSummary(info);
3072}
3073
David Srbeckyd28f4a02016-03-14 17:14:24 +00003074void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3075 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003076}
3077
3078void CodeGeneratorARM64::GenerateNop() {
3079 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003080}
3081
Alexandre Rames5319def2014-10-23 10:03:10 +01003082void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003083 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003084}
3085
3086void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003087 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003088}
3089
3090void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003091 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003092}
3093
3094void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003095 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003096}
3097
Roland Levillain44015862016-01-22 11:47:17 +00003098static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3099 return kEmitCompilerReadBarrier &&
3100 (kUseBakerReadBarrier ||
3101 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3102 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3103 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3104}
3105
Alexandre Rames67555f72014-11-18 10:55:16 +00003106void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003107 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003108 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3109 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003110 case TypeCheckKind::kExactCheck:
3111 case TypeCheckKind::kAbstractClassCheck:
3112 case TypeCheckKind::kClassHierarchyCheck:
3113 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003114 call_kind =
3115 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003116 break;
3117 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003118 case TypeCheckKind::kUnresolvedCheck:
3119 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003120 call_kind = LocationSummary::kCallOnSlowPath;
3121 break;
3122 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003123
Alexandre Rames67555f72014-11-18 10:55:16 +00003124 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003125 locations->SetInAt(0, Location::RequiresRegister());
3126 locations->SetInAt(1, Location::RequiresRegister());
3127 // The "out" register is used as a temporary, so it overlaps with the inputs.
3128 // Note that TypeCheckSlowPathARM64 uses this register too.
3129 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3130 // When read barriers are enabled, we need a temporary register for
3131 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003132 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003133 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003134 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003135}
3136
3137void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003138 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003139 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003140 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003141 Register obj = InputRegisterAt(instruction, 0);
3142 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003143 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003144 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003145 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3146 locations->GetTemp(0) :
3147 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003148 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3149 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3150 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3151 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003152
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003153 vixl::Label done, zero;
3154 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003155
3156 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003157 // Avoid null check if we know `obj` is not null.
3158 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003159 __ Cbz(obj, &zero);
3160 }
3161
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003162 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003163 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003164
Roland Levillain44015862016-01-22 11:47:17 +00003165 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003166 case TypeCheckKind::kExactCheck: {
3167 __ Cmp(out, cls);
3168 __ Cset(out, eq);
3169 if (zero.IsLinked()) {
3170 __ B(&done);
3171 }
3172 break;
3173 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003174
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003175 case TypeCheckKind::kAbstractClassCheck: {
3176 // If the class is abstract, we eagerly fetch the super class of the
3177 // object to avoid doing a comparison we know will fail.
3178 vixl::Label loop, success;
3179 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003180 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003181 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003182 // If `out` is null, we use it for the result, and jump to `done`.
3183 __ Cbz(out, &done);
3184 __ Cmp(out, cls);
3185 __ B(ne, &loop);
3186 __ Mov(out, 1);
3187 if (zero.IsLinked()) {
3188 __ B(&done);
3189 }
3190 break;
3191 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003192
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003193 case TypeCheckKind::kClassHierarchyCheck: {
3194 // Walk over the class hierarchy to find a match.
3195 vixl::Label loop, success;
3196 __ Bind(&loop);
3197 __ Cmp(out, cls);
3198 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003199 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003200 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003201 __ Cbnz(out, &loop);
3202 // If `out` is null, we use it for the result, and jump to `done`.
3203 __ B(&done);
3204 __ Bind(&success);
3205 __ Mov(out, 1);
3206 if (zero.IsLinked()) {
3207 __ B(&done);
3208 }
3209 break;
3210 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003211
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003212 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003213 // Do an exact check.
3214 vixl::Label exact_check;
3215 __ Cmp(out, cls);
3216 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003217 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003218 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003219 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003220 // If `out` is null, we use it for the result, and jump to `done`.
3221 __ Cbz(out, &done);
3222 __ Ldrh(out, HeapOperand(out, primitive_offset));
3223 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3224 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003225 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003226 __ Mov(out, 1);
3227 __ B(&done);
3228 break;
3229 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003230
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003231 case TypeCheckKind::kArrayCheck: {
3232 __ Cmp(out, cls);
3233 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003234 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3235 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003236 codegen_->AddSlowPath(slow_path);
3237 __ B(ne, slow_path->GetEntryLabel());
3238 __ Mov(out, 1);
3239 if (zero.IsLinked()) {
3240 __ B(&done);
3241 }
3242 break;
3243 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003244
Calin Juravle98893e12015-10-02 21:05:03 +01003245 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003246 case TypeCheckKind::kInterfaceCheck: {
3247 // Note that we indeed only call on slow path, but we always go
3248 // into the slow path for the unresolved and interface check
3249 // cases.
3250 //
3251 // We cannot directly call the InstanceofNonTrivial runtime
3252 // entry point without resorting to a type checking slow path
3253 // here (i.e. by calling InvokeRuntime directly), as it would
3254 // require to assign fixed registers for the inputs of this
3255 // HInstanceOf instruction (following the runtime calling
3256 // convention), which might be cluttered by the potential first
3257 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003258 //
3259 // TODO: Introduce a new runtime entry point taking the object
3260 // to test (instead of its class) as argument, and let it deal
3261 // with the read barrier issues. This will let us refactor this
3262 // case of the `switch` code as it was previously (with a direct
3263 // call to the runtime not using a type checking slow path).
3264 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003265 DCHECK(locations->OnlyCallsOnSlowPath());
3266 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3267 /* is_fatal */ false);
3268 codegen_->AddSlowPath(slow_path);
3269 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003270 if (zero.IsLinked()) {
3271 __ B(&done);
3272 }
3273 break;
3274 }
3275 }
3276
3277 if (zero.IsLinked()) {
3278 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003279 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003280 }
3281
3282 if (done.IsLinked()) {
3283 __ Bind(&done);
3284 }
3285
3286 if (slow_path != nullptr) {
3287 __ Bind(slow_path->GetExitLabel());
3288 }
3289}
3290
3291void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3292 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3293 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3294
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003295 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3296 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003297 case TypeCheckKind::kExactCheck:
3298 case TypeCheckKind::kAbstractClassCheck:
3299 case TypeCheckKind::kClassHierarchyCheck:
3300 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003301 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3302 LocationSummary::kCallOnSlowPath :
3303 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003304 break;
3305 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003306 case TypeCheckKind::kUnresolvedCheck:
3307 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003308 call_kind = LocationSummary::kCallOnSlowPath;
3309 break;
3310 }
3311
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003312 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3313 locations->SetInAt(0, Location::RequiresRegister());
3314 locations->SetInAt(1, Location::RequiresRegister());
3315 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3316 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003317 // When read barriers are enabled, we need an additional temporary
3318 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003319 if (TypeCheckNeedsATemporary(type_check_kind)) {
3320 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003321 }
3322}
3323
3324void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003325 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003326 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003327 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003328 Register obj = InputRegisterAt(instruction, 0);
3329 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003330 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003331 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3332 locations->GetTemp(1) :
3333 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003334 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003335 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3336 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3337 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3338 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003339
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003340 bool is_type_check_slow_path_fatal =
3341 (type_check_kind == TypeCheckKind::kExactCheck ||
3342 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3343 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3344 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3345 !instruction->CanThrowIntoCatchBlock();
3346 SlowPathCodeARM64* type_check_slow_path =
3347 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3348 is_type_check_slow_path_fatal);
3349 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003350
3351 vixl::Label done;
3352 // Avoid null check if we know obj is not null.
3353 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003354 __ Cbz(obj, &done);
3355 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003356
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003357 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003358 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003359
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003360 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003361 case TypeCheckKind::kExactCheck:
3362 case TypeCheckKind::kArrayCheck: {
3363 __ Cmp(temp, cls);
3364 // Jump to slow path for throwing the exception or doing a
3365 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003366 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003367 break;
3368 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003369
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003370 case TypeCheckKind::kAbstractClassCheck: {
3371 // If the class is abstract, we eagerly fetch the super class of the
3372 // object to avoid doing a comparison we know will fail.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003373 vixl::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003374 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003375 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003376 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003377
3378 // If the class reference currently in `temp` is not null, jump
3379 // to the `compare_classes` label to compare it with the checked
3380 // class.
3381 __ Cbnz(temp, &compare_classes);
3382 // Otherwise, jump to the slow path to throw the exception.
3383 //
3384 // But before, move back the object's class into `temp` before
3385 // going into the slow path, as it has been overwritten in the
3386 // meantime.
3387 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003388 GenerateReferenceLoadTwoRegisters(
3389 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003390 __ B(type_check_slow_path->GetEntryLabel());
3391
3392 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003393 __ Cmp(temp, cls);
3394 __ B(ne, &loop);
3395 break;
3396 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003397
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003398 case TypeCheckKind::kClassHierarchyCheck: {
3399 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003400 vixl::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003401 __ Bind(&loop);
3402 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003403 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003404
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003405 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003406 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003407
3408 // If the class reference currently in `temp` is not null, jump
3409 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003410 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003411 // Otherwise, jump to the slow path to throw the exception.
3412 //
3413 // But before, move back the object's class into `temp` before
3414 // going into the slow path, as it has been overwritten in the
3415 // meantime.
3416 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003417 GenerateReferenceLoadTwoRegisters(
3418 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003419 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003420 break;
3421 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003422
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003423 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003424 // Do an exact check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003425 vixl::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003426 __ Cmp(temp, cls);
3427 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003428
3429 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003430 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003431 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003432
3433 // If the component type is not null (i.e. the object is indeed
3434 // an array), jump to label `check_non_primitive_component_type`
3435 // to further check that this component type is not a primitive
3436 // type.
3437 __ Cbnz(temp, &check_non_primitive_component_type);
3438 // Otherwise, jump to the slow path to throw the exception.
3439 //
3440 // But before, move back the object's class into `temp` before
3441 // going into the slow path, as it has been overwritten in the
3442 // meantime.
3443 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003444 GenerateReferenceLoadTwoRegisters(
3445 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003446 __ B(type_check_slow_path->GetEntryLabel());
3447
3448 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003449 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3450 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003451 __ Cbz(temp, &done);
3452 // Same comment as above regarding `temp` and the slow path.
3453 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003454 GenerateReferenceLoadTwoRegisters(
3455 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003456 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003457 break;
3458 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003459
Calin Juravle98893e12015-10-02 21:05:03 +01003460 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003461 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003462 // We always go into the type check slow path for the unresolved
3463 // and interface check cases.
3464 //
3465 // We cannot directly call the CheckCast runtime entry point
3466 // without resorting to a type checking slow path here (i.e. by
3467 // calling InvokeRuntime directly), as it would require to
3468 // assign fixed registers for the inputs of this HInstanceOf
3469 // instruction (following the runtime calling convention), which
3470 // might be cluttered by the potential first read barrier
3471 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003472 //
3473 // TODO: Introduce a new runtime entry point taking the object
3474 // to test (instead of its class) as argument, and let it deal
3475 // with the read barrier issues. This will let us refactor this
3476 // case of the `switch` code as it was previously (with a direct
3477 // call to the runtime not using a type checking slow path).
3478 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003479 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003480 break;
3481 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003482 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003483
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003484 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003485}
3486
Alexandre Rames5319def2014-10-23 10:03:10 +01003487void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3488 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3489 locations->SetOut(Location::ConstantLocation(constant));
3490}
3491
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003492void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003493 // Will be generated at use site.
3494}
3495
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003496void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3497 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3498 locations->SetOut(Location::ConstantLocation(constant));
3499}
3500
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003501void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003502 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003503}
3504
Calin Juravle175dc732015-08-25 15:42:32 +01003505void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3506 // The trampoline uses the same calling convention as dex calling conventions,
3507 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3508 // the method_idx.
3509 HandleInvoke(invoke);
3510}
3511
3512void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3513 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3514}
3515
Alexandre Rames5319def2014-10-23 10:03:10 +01003516void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003517 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003518 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003519}
3520
Alexandre Rames67555f72014-11-18 10:55:16 +00003521void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3522 HandleInvoke(invoke);
3523}
3524
3525void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3526 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003527 LocationSummary* locations = invoke->GetLocations();
3528 Register temp = XRegisterFrom(locations->GetTemp(0));
Mathieu Chartiere401d142015-04-22 13:56:20 -07003529 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
3530 invoke->GetImtIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003531 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003532 Offset class_offset = mirror::Object::ClassOffset();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003533 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003534
3535 // The register ip1 is required to be used for the hidden argument in
3536 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003537 MacroAssembler* masm = GetVIXLAssembler();
3538 UseScratchRegisterScope scratch_scope(masm);
3539 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003540 scratch_scope.Exclude(ip1);
3541 __ Mov(ip1, invoke->GetDexMethodIndex());
3542
Alexandre Rames67555f72014-11-18 10:55:16 +00003543 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003544 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003545 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003546 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003547 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003548 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003549 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003550 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003551 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003552 // Instead of simply (possibly) unpoisoning `temp` here, we should
3553 // emit a read barrier for the previous class reference load.
3554 // However this is not required in practice, as this is an
3555 // intermediate/temporary reference and because the current
3556 // concurrent copying collector keeps the from-space memory
3557 // intact/accessible until the end of the marking phase (the
3558 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003559 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Alexandre Rames67555f72014-11-18 10:55:16 +00003560 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003561 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003562 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003563 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003564 // lr();
3565 __ Blr(lr);
3566 DCHECK(!codegen_->IsLeafMethod());
3567 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3568}
3569
3570void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003571 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3572 if (intrinsic.TryDispatch(invoke)) {
3573 return;
3574 }
3575
Alexandre Rames67555f72014-11-18 10:55:16 +00003576 HandleInvoke(invoke);
3577}
3578
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003579void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003580 // Explicit clinit checks triggered by static invokes must have been pruned by
3581 // art::PrepareForRegisterAllocation.
3582 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003583
Andreas Gampe878d58c2015-01-15 23:24:00 -08003584 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3585 if (intrinsic.TryDispatch(invoke)) {
3586 return;
3587 }
3588
Alexandre Rames67555f72014-11-18 10:55:16 +00003589 HandleInvoke(invoke);
3590}
3591
Andreas Gampe878d58c2015-01-15 23:24:00 -08003592static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3593 if (invoke->GetLocations()->Intrinsified()) {
3594 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3595 intrinsic.Dispatch(invoke);
3596 return true;
3597 }
3598 return false;
3599}
3600
Vladimir Markodc151b22015-10-15 18:02:30 +01003601HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3602 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3603 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003604 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003605 return desired_dispatch_info;
3606}
3607
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003608void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003609 // For better instruction scheduling we load the direct code pointer before the method pointer.
3610 bool direct_code_loaded = false;
3611 switch (invoke->GetCodePtrLocation()) {
3612 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3613 // LR = code address from literal pool with link-time patch.
3614 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3615 direct_code_loaded = true;
3616 break;
3617 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3618 // LR = invoke->GetDirectCodePtr();
3619 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3620 direct_code_loaded = true;
3621 break;
3622 default:
3623 break;
3624 }
3625
Andreas Gampe878d58c2015-01-15 23:24:00 -08003626 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003627 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3628 switch (invoke->GetMethodLoadKind()) {
3629 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3630 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003631 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003632 break;
3633 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003634 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003635 break;
3636 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3637 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003638 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003639 break;
3640 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3641 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003642 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003643 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3644 break;
3645 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3646 // Add ADRP with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003647 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3648 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
3649 vixl::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003650 {
3651 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003652 __ Bind(adrp_label);
3653 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003654 }
Vladimir Marko58155012015-08-19 12:49:41 +00003655 // Add LDR with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003656 vixl::Label* ldr_label =
3657 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003658 {
3659 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003660 __ Bind(ldr_label);
3661 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003662 }
Vladimir Marko58155012015-08-19 12:49:41 +00003663 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003664 }
Vladimir Marko58155012015-08-19 12:49:41 +00003665 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003666 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003667 Register reg = XRegisterFrom(temp);
3668 Register method_reg;
3669 if (current_method.IsRegister()) {
3670 method_reg = XRegisterFrom(current_method);
3671 } else {
3672 DCHECK(invoke->GetLocations()->Intrinsified());
3673 DCHECK(!current_method.IsValid());
3674 method_reg = reg;
3675 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3676 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003677
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003678 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003679 __ Ldr(reg.X(),
3680 MemOperand(method_reg.X(),
3681 ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003682 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003683 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3684 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003685 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3686 break;
3687 }
3688 }
3689
3690 switch (invoke->GetCodePtrLocation()) {
3691 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3692 __ Bl(&frame_entry_label_);
3693 break;
3694 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3695 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
3696 vixl::Label* label = &relative_call_patches_.back().label;
Alexandre Rames6dc01742015-11-12 14:44:19 +00003697 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
3698 __ Bind(label);
3699 __ bl(0); // Branch and link to itself. This will be overriden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003700 break;
3701 }
3702 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3703 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3704 // LR prepared above for better instruction scheduling.
3705 DCHECK(direct_code_loaded);
3706 // lr()
3707 __ Blr(lr);
3708 break;
3709 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3710 // LR = callee_method->entry_point_from_quick_compiled_code_;
3711 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003712 XRegisterFrom(callee_method),
Vladimir Marko58155012015-08-19 12:49:41 +00003713 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
3714 // lr()
3715 __ Blr(lr);
3716 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003717 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003718
Andreas Gampe878d58c2015-01-15 23:24:00 -08003719 DCHECK(!IsLeafMethod());
3720}
3721
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003722void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003723 // Use the calling convention instead of the location of the receiver, as
3724 // intrinsics may have put the receiver in a different register. In the intrinsics
3725 // slow path, the arguments have been moved to the right place, so here we are
3726 // guaranteed that the receiver is the first register of the calling convention.
3727 InvokeDexCallingConvention calling_convention;
3728 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003729 Register temp = XRegisterFrom(temp_in);
3730 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3731 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
3732 Offset class_offset = mirror::Object::ClassOffset();
3733 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
3734
3735 BlockPoolsScope block_pools(GetVIXLAssembler());
3736
3737 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003738 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003739 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003740 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003741 // Instead of simply (possibly) unpoisoning `temp` here, we should
3742 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003743 // intermediate/temporary reference and because the current
3744 // concurrent copying collector keeps the from-space memory
3745 // intact/accessible until the end of the marking phase (the
3746 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003747 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3748 // temp = temp->GetMethodAt(method_offset);
3749 __ Ldr(temp, MemOperand(temp, method_offset));
3750 // lr = temp->GetEntryPoint();
3751 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3752 // lr();
3753 __ Blr(lr);
3754}
3755
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003756vixl::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(const DexFile& dex_file,
3757 uint32_t string_index,
3758 vixl::Label* adrp_label) {
3759 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3760}
3761
3762vixl::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
3763 uint32_t element_offset,
3764 vixl::Label* adrp_label) {
3765 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3766}
3767
3768vixl::Label* CodeGeneratorARM64::NewPcRelativePatch(const DexFile& dex_file,
3769 uint32_t offset_or_index,
3770 vixl::Label* adrp_label,
3771 ArenaDeque<PcRelativePatchInfo>* patches) {
3772 // Add a patch entry and return the label.
3773 patches->emplace_back(dex_file, offset_or_index);
3774 PcRelativePatchInfo* info = &patches->back();
3775 vixl::Label* label = &info->label;
3776 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3777 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3778 return label;
3779}
3780
3781vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
3782 const DexFile& dex_file, uint32_t string_index) {
3783 return boot_image_string_patches_.GetOrCreate(
3784 StringReference(&dex_file, string_index),
3785 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3786}
3787
3788vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(uint64_t address) {
3789 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3790 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3791 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3792}
3793
3794vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(uint64_t address) {
3795 return DeduplicateUint64Literal(address);
3796}
3797
Vladimir Marko58155012015-08-19 12:49:41 +00003798void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3799 DCHECK(linker_patches->empty());
3800 size_t size =
3801 method_patches_.size() +
3802 call_patches_.size() +
3803 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003804 pc_relative_dex_cache_patches_.size() +
3805 boot_image_string_patches_.size() +
3806 pc_relative_string_patches_.size() +
3807 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003808 linker_patches->reserve(size);
3809 for (const auto& entry : method_patches_) {
3810 const MethodReference& target_method = entry.first;
3811 vixl::Literal<uint64_t>* literal = entry.second;
3812 linker_patches->push_back(LinkerPatch::MethodPatch(literal->offset(),
3813 target_method.dex_file,
3814 target_method.dex_method_index));
3815 }
3816 for (const auto& entry : call_patches_) {
3817 const MethodReference& target_method = entry.first;
3818 vixl::Literal<uint64_t>* literal = entry.second;
3819 linker_patches->push_back(LinkerPatch::CodePatch(literal->offset(),
3820 target_method.dex_file,
3821 target_method.dex_method_index));
3822 }
3823 for (const MethodPatchInfo<vixl::Label>& info : relative_call_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003824 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003825 info.target_method.dex_file,
3826 info.target_method.dex_method_index));
3827 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003828 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003829 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003830 &info.target_dex_file,
Alexandre Rames6dc01742015-11-12 14:44:19 +00003831 info.pc_insn_label->location(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003832 info.offset_or_index));
3833 }
3834 for (const auto& entry : boot_image_string_patches_) {
3835 const StringReference& target_string = entry.first;
3836 vixl::Literal<uint32_t>* literal = entry.second;
3837 linker_patches->push_back(LinkerPatch::StringPatch(literal->offset(),
3838 target_string.dex_file,
3839 target_string.string_index));
3840 }
3841 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
3842 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.location(),
3843 &info.target_dex_file,
3844 info.pc_insn_label->location(),
3845 info.offset_or_index));
3846 }
3847 for (const auto& entry : boot_image_address_patches_) {
3848 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
3849 vixl::Literal<uint32_t>* literal = entry.second;
3850 linker_patches->push_back(LinkerPatch::RecordPosition(literal->offset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003851 }
3852}
3853
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003854vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
3855 Uint32ToLiteralMap* map) {
3856 return map->GetOrCreate(
3857 value,
3858 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3859}
3860
Vladimir Marko58155012015-08-19 12:49:41 +00003861vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003862 return uint64_literals_.GetOrCreate(
3863 value,
3864 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003865}
3866
3867vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
3868 MethodReference target_method,
3869 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003870 return map->GetOrCreate(
3871 target_method,
3872 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003873}
3874
3875vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
3876 MethodReference target_method) {
3877 return DeduplicateMethodLiteral(target_method, &method_patches_);
3878}
3879
3880vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
3881 MethodReference target_method) {
3882 return DeduplicateMethodLiteral(target_method, &call_patches_);
3883}
3884
3885
Andreas Gampe878d58c2015-01-15 23:24:00 -08003886void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003887 // Explicit clinit checks triggered by static invokes must have been pruned by
3888 // art::PrepareForRegisterAllocation.
3889 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003890
Andreas Gampe878d58c2015-01-15 23:24:00 -08003891 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3892 return;
3893 }
3894
Alexandre Ramesd921d642015-04-16 15:07:16 +01003895 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003896 LocationSummary* locations = invoke->GetLocations();
3897 codegen_->GenerateStaticOrDirectCall(
3898 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003899 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003900}
3901
3902void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003903 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3904 return;
3905 }
3906
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003907 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003908 DCHECK(!codegen_->IsLeafMethod());
3909 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3910}
3911
Alexandre Rames67555f72014-11-18 10:55:16 +00003912void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003913 InvokeRuntimeCallingConvention calling_convention;
3914 CodeGenerator::CreateLoadClassLocationSummary(
3915 cls,
3916 LocationFrom(calling_convention.GetRegisterAt(0)),
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003917 LocationFrom(vixl::x0),
3918 /* code_generator_supports_read_barrier */ true);
Alexandre Rames67555f72014-11-18 10:55:16 +00003919}
3920
3921void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003922 if (cls->NeedsAccessCheck()) {
3923 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
3924 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
3925 cls,
3926 cls->GetDexPc(),
3927 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003928 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01003929 return;
3930 }
3931
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003932 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01003933 Register out = OutputRegister(cls);
3934 Register current_method = InputRegisterAt(cls, 0);
3935 if (cls->IsReferrersClass()) {
Alexandre Rames67555f72014-11-18 10:55:16 +00003936 DCHECK(!cls->CanCallRuntime());
3937 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain44015862016-01-22 11:47:17 +00003938 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
3939 GenerateGcRootFieldLoad(
3940 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Alexandre Rames67555f72014-11-18 10:55:16 +00003941 } else {
Vladimir Marko05792b92015-08-03 11:56:49 +01003942 MemberOffset resolved_types_offset = ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003943 // /* GcRoot<mirror::Class>[] */ out =
3944 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
Vladimir Marko05792b92015-08-03 11:56:49 +01003945 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
Roland Levillain44015862016-01-22 11:47:17 +00003946 // /* GcRoot<mirror::Class> */ out = out[type_index]
3947 GenerateGcRootFieldLoad(
3948 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003949
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00003950 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
3951 DCHECK(cls->CanCallRuntime());
3952 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
3953 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3954 codegen_->AddSlowPath(slow_path);
3955 if (!cls->IsInDexCache()) {
3956 __ Cbz(out, slow_path->GetEntryLabel());
3957 }
3958 if (cls->MustGenerateClinitCheck()) {
3959 GenerateClassInitializationCheck(slow_path, out);
3960 } else {
3961 __ Bind(slow_path->GetExitLabel());
3962 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003963 }
3964 }
3965}
3966
David Brazdilcb1c0552015-08-04 16:22:25 +01003967static MemOperand GetExceptionTlsAddress() {
3968 return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
3969}
3970
Alexandre Rames67555f72014-11-18 10:55:16 +00003971void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
3972 LocationSummary* locations =
3973 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3974 locations->SetOut(Location::RequiresRegister());
3975}
3976
3977void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01003978 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
3979}
3980
3981void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
3982 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
3983}
3984
3985void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
3986 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00003987}
3988
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003989HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
3990 HLoadString::LoadKind desired_string_load_kind) {
3991 if (kEmitCompilerReadBarrier) {
3992 switch (desired_string_load_kind) {
3993 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
3994 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
3995 case HLoadString::LoadKind::kBootImageAddress:
3996 // TODO: Implement for read barrier.
3997 return HLoadString::LoadKind::kDexCacheViaMethod;
3998 default:
3999 break;
4000 }
4001 }
4002 switch (desired_string_load_kind) {
4003 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4004 DCHECK(!GetCompilerOptions().GetCompilePic());
4005 break;
4006 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4007 DCHECK(GetCompilerOptions().GetCompilePic());
4008 break;
4009 case HLoadString::LoadKind::kBootImageAddress:
4010 break;
4011 case HLoadString::LoadKind::kDexCacheAddress:
4012 DCHECK(Runtime::Current()->UseJit());
4013 break;
4014 case HLoadString::LoadKind::kDexCachePcRelative:
4015 DCHECK(!Runtime::Current()->UseJit());
4016 break;
4017 case HLoadString::LoadKind::kDexCacheViaMethod:
4018 break;
4019 }
4020 return desired_string_load_kind;
4021}
4022
Alexandre Rames67555f72014-11-18 10:55:16 +00004023void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004024 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004025 ? LocationSummary::kCallOnSlowPath
4026 : LocationSummary::kNoCall;
4027 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004028 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4029 locations->SetInAt(0, Location::RequiresRegister());
4030 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004031 locations->SetOut(Location::RequiresRegister());
4032}
4033
4034void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004035 Location out_loc = load->GetLocations()->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00004036 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004037
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004038 switch (load->GetLoadKind()) {
4039 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4040 DCHECK(!kEmitCompilerReadBarrier);
4041 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4042 load->GetStringIndex()));
4043 return; // No dex cache slow path.
4044 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
4045 DCHECK(!kEmitCompilerReadBarrier);
4046 // Add ADRP with its PC-relative String patch.
4047 const DexFile& dex_file = load->GetDexFile();
4048 uint32_t string_index = load->GetStringIndex();
4049 vixl::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
4050 {
4051 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4052 __ Bind(adrp_label);
4053 __ adrp(out.X(), /* offset placeholder */ 0);
4054 }
4055 // Add ADD with its PC-relative String patch.
4056 vixl::Label* add_label =
4057 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4058 {
4059 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4060 __ Bind(add_label);
4061 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4062 }
4063 return; // No dex cache slow path.
4064 }
4065 case HLoadString::LoadKind::kBootImageAddress: {
4066 DCHECK(!kEmitCompilerReadBarrier);
4067 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4068 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4069 return; // No dex cache slow path.
4070 }
4071 case HLoadString::LoadKind::kDexCacheAddress: {
4072 DCHECK_NE(load->GetAddress(), 0u);
4073 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4074 // that gives a 16KiB range. To try and reduce the number of literals if we load
4075 // multiple strings, simply split the dex cache address to a 16KiB aligned base
4076 // loaded from a literal and the remaining offset embedded in the load.
4077 static_assert(sizeof(GcRoot<mirror::String>) == 4u, "Expected GC root to be 4 bytes.");
4078 DCHECK_ALIGNED(load->GetAddress(), 4u);
4079 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4080 uint64_t base_address = load->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4081 uint32_t offset = load->GetAddress() & MaxInt<uint64_t>(offset_bits);
4082 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4083 GenerateGcRootFieldLoad(load, out_loc, out.X(), offset);
4084 break;
4085 }
4086 case HLoadString::LoadKind::kDexCachePcRelative: {
4087 // Add ADRP with its PC-relative DexCache access patch.
4088 const DexFile& dex_file = load->GetDexFile();
4089 uint32_t element_offset = load->GetDexCacheElementOffset();
4090 vixl::Label* adrp_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
4091 {
4092 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4093 __ Bind(adrp_label);
4094 __ adrp(out.X(), /* offset placeholder */ 0);
4095 }
4096 // Add LDR with its PC-relative DexCache access patch.
4097 vixl::Label* ldr_label =
4098 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4099 GenerateGcRootFieldLoad(load, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4100 break;
4101 }
4102 case HLoadString::LoadKind::kDexCacheViaMethod: {
4103 Register current_method = InputRegisterAt(load, 0);
4104 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4105 GenerateGcRootFieldLoad(
4106 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4107 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
4108 __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
4109 // /* GcRoot<mirror::String> */ out = out[string_index]
4110 GenerateGcRootFieldLoad(
4111 load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));
4112 break;
4113 }
4114 default:
4115 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
4116 UNREACHABLE();
4117 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004118
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004119 if (!load->IsInDexCache()) {
4120 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
4121 codegen_->AddSlowPath(slow_path);
4122 __ Cbz(out, slow_path->GetEntryLabel());
4123 __ Bind(slow_path->GetExitLabel());
4124 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004125}
4126
Alexandre Rames5319def2014-10-23 10:03:10 +01004127void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4128 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4129 locations->SetOut(Location::ConstantLocation(constant));
4130}
4131
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004132void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004133 // Will be generated at use site.
4134}
4135
Alexandre Rames67555f72014-11-18 10:55:16 +00004136void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4137 LocationSummary* locations =
4138 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4139 InvokeRuntimeCallingConvention calling_convention;
4140 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4141}
4142
4143void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4144 codegen_->InvokeRuntime(instruction->IsEnter()
4145 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
4146 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004147 instruction->GetDexPc(),
4148 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004149 if (instruction->IsEnter()) {
4150 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4151 } else {
4152 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4153 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004154}
4155
Alexandre Rames42d641b2014-10-27 14:00:51 +00004156void LocationsBuilderARM64::VisitMul(HMul* mul) {
4157 LocationSummary* locations =
4158 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4159 switch (mul->GetResultType()) {
4160 case Primitive::kPrimInt:
4161 case Primitive::kPrimLong:
4162 locations->SetInAt(0, Location::RequiresRegister());
4163 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004164 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004165 break;
4166
4167 case Primitive::kPrimFloat:
4168 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004169 locations->SetInAt(0, Location::RequiresFpuRegister());
4170 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004171 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004172 break;
4173
4174 default:
4175 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4176 }
4177}
4178
4179void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4180 switch (mul->GetResultType()) {
4181 case Primitive::kPrimInt:
4182 case Primitive::kPrimLong:
4183 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4184 break;
4185
4186 case Primitive::kPrimFloat:
4187 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004188 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004189 break;
4190
4191 default:
4192 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4193 }
4194}
4195
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004196void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4197 LocationSummary* locations =
4198 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4199 switch (neg->GetResultType()) {
4200 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004201 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004202 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004203 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004204 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004205
4206 case Primitive::kPrimFloat:
4207 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004208 locations->SetInAt(0, Location::RequiresFpuRegister());
4209 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004210 break;
4211
4212 default:
4213 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4214 }
4215}
4216
4217void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4218 switch (neg->GetResultType()) {
4219 case Primitive::kPrimInt:
4220 case Primitive::kPrimLong:
4221 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4222 break;
4223
4224 case Primitive::kPrimFloat:
4225 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004226 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004227 break;
4228
4229 default:
4230 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4231 }
4232}
4233
4234void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4235 LocationSummary* locations =
4236 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4237 InvokeRuntimeCallingConvention calling_convention;
4238 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004239 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004240 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004241 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004242}
4243
4244void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
4245 LocationSummary* locations = instruction->GetLocations();
4246 InvokeRuntimeCallingConvention calling_convention;
4247 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
4248 DCHECK(type_index.Is(w0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004249 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01004250 // Note: if heap poisoning is enabled, the entry point takes cares
4251 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01004252 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4253 instruction,
4254 instruction->GetDexPc(),
4255 nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004256 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004257}
4258
Alexandre Rames5319def2014-10-23 10:03:10 +01004259void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4260 LocationSummary* locations =
4261 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4262 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004263 if (instruction->IsStringAlloc()) {
4264 locations->AddTemp(LocationFrom(kArtMethodRegister));
4265 } else {
4266 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4267 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4268 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004269 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4270}
4271
4272void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004273 // Note: if heap poisoning is enabled, the entry point takes cares
4274 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004275 if (instruction->IsStringAlloc()) {
4276 // String is allocated through StringFactory. Call NewEmptyString entry point.
4277 Location temp = instruction->GetLocations()->GetTemp(0);
4278 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
4279 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4280 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4281 __ Blr(lr);
4282 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4283 } else {
4284 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4285 instruction,
4286 instruction->GetDexPc(),
4287 nullptr);
4288 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
4289 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004290}
4291
4292void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4293 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004294 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004295 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004296}
4297
4298void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004299 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004300 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004301 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004302 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004303 break;
4304
4305 default:
4306 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4307 }
4308}
4309
David Brazdil66d126e2015-04-03 16:02:44 +01004310void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4311 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4312 locations->SetInAt(0, Location::RequiresRegister());
4313 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4314}
4315
4316void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
David Brazdil66d126e2015-04-03 16:02:44 +01004317 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
4318}
4319
Alexandre Rames5319def2014-10-23 10:03:10 +01004320void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004321 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4322 ? LocationSummary::kCallOnSlowPath
4323 : LocationSummary::kNoCall;
4324 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames5319def2014-10-23 10:03:10 +01004325 locations->SetInAt(0, Location::RequiresRegister());
4326 if (instruction->HasUses()) {
4327 locations->SetOut(Location::SameAsFirstInput());
4328 }
4329}
4330
Calin Juravle2ae48182016-03-16 14:05:09 +00004331void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4332 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004333 return;
4334 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004335
Alexandre Ramesd921d642015-04-16 15:07:16 +01004336 BlockPoolsScope block_pools(GetVIXLAssembler());
4337 Location obj = instruction->GetLocations()->InAt(0);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004338 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
Calin Juravle2ae48182016-03-16 14:05:09 +00004339 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004340}
4341
Calin Juravle2ae48182016-03-16 14:05:09 +00004342void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004343 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004344 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004345
4346 LocationSummary* locations = instruction->GetLocations();
4347 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004348
4349 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004350}
4351
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004352void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004353 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004354}
4355
Alexandre Rames67555f72014-11-18 10:55:16 +00004356void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4357 HandleBinaryOp(instruction);
4358}
4359
4360void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4361 HandleBinaryOp(instruction);
4362}
4363
Alexandre Rames3e69f162014-12-10 10:36:50 +00004364void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4365 LOG(FATAL) << "Unreachable";
4366}
4367
4368void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4369 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4370}
4371
Alexandre Rames5319def2014-10-23 10:03:10 +01004372void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4373 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4374 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4375 if (location.IsStackSlot()) {
4376 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4377 } else if (location.IsDoubleStackSlot()) {
4378 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4379 }
4380 locations->SetOut(location);
4381}
4382
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004383void InstructionCodeGeneratorARM64::VisitParameterValue(
4384 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004385 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004386}
4387
4388void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4389 LocationSummary* locations =
4390 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004391 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004392}
4393
4394void InstructionCodeGeneratorARM64::VisitCurrentMethod(
4395 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4396 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01004397}
4398
4399void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
4400 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4401 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
4402 locations->SetInAt(i, Location::Any());
4403 }
4404 locations->SetOut(Location::Any());
4405}
4406
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004407void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004408 LOG(FATAL) << "Unreachable";
4409}
4410
Serban Constantinescu02164b32014-11-13 14:05:07 +00004411void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004412 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004413 LocationSummary::CallKind call_kind =
4414 Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004415 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4416
4417 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004418 case Primitive::kPrimInt:
4419 case Primitive::kPrimLong:
4420 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004421 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004422 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4423 break;
4424
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004425 case Primitive::kPrimFloat:
4426 case Primitive::kPrimDouble: {
4427 InvokeRuntimeCallingConvention calling_convention;
4428 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4429 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4430 locations->SetOut(calling_convention.GetReturnLocation(type));
4431
4432 break;
4433 }
4434
Serban Constantinescu02164b32014-11-13 14:05:07 +00004435 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004436 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004437 }
4438}
4439
4440void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4441 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004442
Serban Constantinescu02164b32014-11-13 14:05:07 +00004443 switch (type) {
4444 case Primitive::kPrimInt:
4445 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004446 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004447 break;
4448 }
4449
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004450 case Primitive::kPrimFloat:
4451 case Primitive::kPrimDouble: {
4452 int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
4453 : QUICK_ENTRY_POINT(pFmod);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004454 codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004455 if (type == Primitive::kPrimFloat) {
4456 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4457 } else {
4458 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4459 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004460 break;
4461 }
4462
Serban Constantinescu02164b32014-11-13 14:05:07 +00004463 default:
4464 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004465 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004466 }
4467}
4468
Calin Juravle27df7582015-04-17 19:12:31 +01004469void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4470 memory_barrier->SetLocations(nullptr);
4471}
4472
4473void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004474 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01004475}
4476
Alexandre Rames5319def2014-10-23 10:03:10 +01004477void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
4478 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4479 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004480 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01004481}
4482
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004483void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004484 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004485}
4486
4487void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
4488 instruction->SetLocations(nullptr);
4489}
4490
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004491void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004492 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004493}
4494
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004495void LocationsBuilderARM64::VisitRor(HRor* ror) {
4496 HandleBinaryOp(ror);
4497}
4498
4499void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
4500 HandleBinaryOp(ror);
4501}
4502
Serban Constantinescu02164b32014-11-13 14:05:07 +00004503void LocationsBuilderARM64::VisitShl(HShl* shl) {
4504 HandleShift(shl);
4505}
4506
4507void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
4508 HandleShift(shl);
4509}
4510
4511void LocationsBuilderARM64::VisitShr(HShr* shr) {
4512 HandleShift(shr);
4513}
4514
4515void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
4516 HandleShift(shr);
4517}
4518
Alexandre Rames5319def2014-10-23 10:03:10 +01004519void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004520 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004521}
4522
4523void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004524 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004525}
4526
Alexandre Rames67555f72014-11-18 10:55:16 +00004527void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004528 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00004529}
4530
4531void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004532 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00004533}
4534
4535void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004536 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004537}
4538
Alexandre Rames67555f72014-11-18 10:55:16 +00004539void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004540 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01004541}
4542
Calin Juravlee460d1d2015-09-29 04:52:17 +01004543void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
4544 HUnresolvedInstanceFieldGet* instruction) {
4545 FieldAccessCallingConventionARM64 calling_convention;
4546 codegen_->CreateUnresolvedFieldLocationSummary(
4547 instruction, instruction->GetFieldType(), calling_convention);
4548}
4549
4550void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
4551 HUnresolvedInstanceFieldGet* instruction) {
4552 FieldAccessCallingConventionARM64 calling_convention;
4553 codegen_->GenerateUnresolvedFieldAccess(instruction,
4554 instruction->GetFieldType(),
4555 instruction->GetFieldIndex(),
4556 instruction->GetDexPc(),
4557 calling_convention);
4558}
4559
4560void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
4561 HUnresolvedInstanceFieldSet* instruction) {
4562 FieldAccessCallingConventionARM64 calling_convention;
4563 codegen_->CreateUnresolvedFieldLocationSummary(
4564 instruction, instruction->GetFieldType(), calling_convention);
4565}
4566
4567void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
4568 HUnresolvedInstanceFieldSet* instruction) {
4569 FieldAccessCallingConventionARM64 calling_convention;
4570 codegen_->GenerateUnresolvedFieldAccess(instruction,
4571 instruction->GetFieldType(),
4572 instruction->GetFieldIndex(),
4573 instruction->GetDexPc(),
4574 calling_convention);
4575}
4576
4577void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
4578 HUnresolvedStaticFieldGet* instruction) {
4579 FieldAccessCallingConventionARM64 calling_convention;
4580 codegen_->CreateUnresolvedFieldLocationSummary(
4581 instruction, instruction->GetFieldType(), calling_convention);
4582}
4583
4584void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
4585 HUnresolvedStaticFieldGet* instruction) {
4586 FieldAccessCallingConventionARM64 calling_convention;
4587 codegen_->GenerateUnresolvedFieldAccess(instruction,
4588 instruction->GetFieldType(),
4589 instruction->GetFieldIndex(),
4590 instruction->GetDexPc(),
4591 calling_convention);
4592}
4593
4594void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
4595 HUnresolvedStaticFieldSet* instruction) {
4596 FieldAccessCallingConventionARM64 calling_convention;
4597 codegen_->CreateUnresolvedFieldLocationSummary(
4598 instruction, instruction->GetFieldType(), calling_convention);
4599}
4600
4601void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
4602 HUnresolvedStaticFieldSet* instruction) {
4603 FieldAccessCallingConventionARM64 calling_convention;
4604 codegen_->GenerateUnresolvedFieldAccess(instruction,
4605 instruction->GetFieldType(),
4606 instruction->GetFieldIndex(),
4607 instruction->GetDexPc(),
4608 calling_convention);
4609}
4610
Alexandre Rames5319def2014-10-23 10:03:10 +01004611void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
4612 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4613}
4614
4615void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004616 HBasicBlock* block = instruction->GetBlock();
4617 if (block->GetLoopInformation() != nullptr) {
4618 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4619 // The back edge will generate the suspend check.
4620 return;
4621 }
4622 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4623 // The goto will generate the suspend check.
4624 return;
4625 }
4626 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01004627}
4628
Alexandre Rames67555f72014-11-18 10:55:16 +00004629void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
4630 LocationSummary* locations =
4631 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4632 InvokeRuntimeCallingConvention calling_convention;
4633 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4634}
4635
4636void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
4637 codegen_->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004638 QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004639 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004640}
4641
4642void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
4643 LocationSummary* locations =
4644 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
4645 Primitive::Type input_type = conversion->GetInputType();
4646 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00004647 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00004648 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4649 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4650 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4651 }
4652
Alexandre Rames542361f2015-01-29 16:57:31 +00004653 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004654 locations->SetInAt(0, Location::RequiresFpuRegister());
4655 } else {
4656 locations->SetInAt(0, Location::RequiresRegister());
4657 }
4658
Alexandre Rames542361f2015-01-29 16:57:31 +00004659 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004660 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4661 } else {
4662 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4663 }
4664}
4665
4666void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
4667 Primitive::Type result_type = conversion->GetResultType();
4668 Primitive::Type input_type = conversion->GetInputType();
4669
4670 DCHECK_NE(input_type, result_type);
4671
Alexandre Rames542361f2015-01-29 16:57:31 +00004672 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004673 int result_size = Primitive::ComponentSize(result_type);
4674 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00004675 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004676 Register output = OutputRegister(conversion);
4677 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00004678 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01004679 // 'int' values are used directly as W registers, discarding the top
4680 // bits, so we don't need to sign-extend and can just perform a move.
4681 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
4682 // top 32 bits of the target register. We theoretically could leave those
4683 // bits unchanged, but we would have to make sure that no code uses a
4684 // 32bit input value as a 64bit value assuming that the top 32 bits are
4685 // zero.
4686 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00004687 } else if (result_type == Primitive::kPrimChar ||
4688 (input_type == Primitive::kPrimChar && input_size < result_size)) {
4689 __ Ubfx(output,
4690 output.IsX() ? source.X() : source.W(),
4691 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004692 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00004693 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004694 }
Alexandre Rames542361f2015-01-29 16:57:31 +00004695 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004696 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004697 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004698 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4699 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004700 } else if (Primitive::IsFloatingPointType(result_type) &&
4701 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004702 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
4703 } else {
4704 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4705 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00004706 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00004707}
Alexandre Rames67555f72014-11-18 10:55:16 +00004708
Serban Constantinescu02164b32014-11-13 14:05:07 +00004709void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
4710 HandleShift(ushr);
4711}
4712
4713void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
4714 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00004715}
4716
4717void LocationsBuilderARM64::VisitXor(HXor* instruction) {
4718 HandleBinaryOp(instruction);
4719}
4720
4721void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
4722 HandleBinaryOp(instruction);
4723}
4724
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004725void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004726 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004727 LOG(FATAL) << "Unreachable";
4728}
4729
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004730void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004731 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004732 LOG(FATAL) << "Unreachable";
4733}
4734
Mark Mendellfe57faa2015-09-18 09:26:15 -04004735// Simple implementation of packed switch - generate cascaded compare/jumps.
4736void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4737 LocationSummary* locations =
4738 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4739 locations->SetInAt(0, Location::RequiresRegister());
4740}
4741
4742void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4743 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08004744 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04004745 Register value_reg = InputRegisterAt(switch_instr, 0);
4746 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4747
Zheng Xu3927c8b2015-11-18 17:46:25 +08004748 // Roughly set 16 as max average assemblies generated per HIR in a graph.
4749 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * vixl::kInstructionSize;
4750 // ADR has a limited range(+/-1MB), so we set a threshold for the number of HIRs in the graph to
4751 // make sure we don't emit it if the target may run out of range.
4752 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
4753 // ranges and emit the tables only as required.
4754 static constexpr int32_t kJumpTableInstructionThreshold = 1* MB / kMaxExpectedSizePerHInstruction;
Mark Mendellfe57faa2015-09-18 09:26:15 -04004755
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004756 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08004757 // Current instruction id is an upper bound of the number of HIRs in the graph.
4758 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
4759 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004760 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4761 Register temp = temps.AcquireW();
4762 __ Subs(temp, value_reg, Operand(lower_bound));
4763
Zheng Xu3927c8b2015-11-18 17:46:25 +08004764 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004765 // Jump to successors[0] if value == lower_bound.
4766 __ B(eq, codegen_->GetLabelOf(successors[0]));
4767 int32_t last_index = 0;
4768 for (; num_entries - last_index > 2; last_index += 2) {
4769 __ Subs(temp, temp, Operand(2));
4770 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4771 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
4772 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4773 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
4774 }
4775 if (num_entries - last_index == 2) {
4776 // The last missing case_value.
4777 __ Cmp(temp, Operand(1));
4778 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08004779 }
4780
4781 // And the default for any other value.
4782 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
4783 __ B(codegen_->GetLabelOf(default_block));
4784 }
4785 } else {
4786 JumpTableARM64* jump_table = new (GetGraph()->GetArena()) JumpTableARM64(switch_instr);
4787 codegen_->AddJumpTable(jump_table);
4788
4789 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4790
4791 // Below instructions should use at most one blocked register. Since there are two blocked
4792 // registers, we are free to block one.
4793 Register temp_w = temps.AcquireW();
4794 Register index;
4795 // Remove the bias.
4796 if (lower_bound != 0) {
4797 index = temp_w;
4798 __ Sub(index, value_reg, Operand(lower_bound));
4799 } else {
4800 index = value_reg;
4801 }
4802
4803 // Jump to default block if index is out of the range.
4804 __ Cmp(index, Operand(num_entries));
4805 __ B(hs, codegen_->GetLabelOf(default_block));
4806
4807 // In current VIXL implementation, it won't require any blocked registers to encode the
4808 // immediate value for Adr. So we are free to use both VIXL blocked registers to reduce the
4809 // register pressure.
4810 Register table_base = temps.AcquireX();
4811 // Load jump offset from the table.
4812 __ Adr(table_base, jump_table->GetTableStartLabel());
4813 Register jump_offset = temp_w;
4814 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
4815
4816 // Jump to target block by branching to table_base(pc related) + offset.
4817 Register target_address = table_base;
4818 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
4819 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04004820 }
4821}
4822
Roland Levillain44015862016-01-22 11:47:17 +00004823void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
4824 Location out,
4825 uint32_t offset,
4826 Location maybe_temp) {
4827 Primitive::Type type = Primitive::kPrimNot;
4828 Register out_reg = RegisterFrom(out, type);
4829 if (kEmitCompilerReadBarrier) {
4830 Register temp_reg = RegisterFrom(maybe_temp, type);
4831 if (kUseBakerReadBarrier) {
4832 // Load with fast path based Baker's read barrier.
4833 // /* HeapReference<Object> */ out = *(out + offset)
4834 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4835 out,
4836 out_reg,
4837 offset,
4838 temp_reg,
4839 /* needs_null_check */ false,
4840 /* use_load_acquire */ false);
4841 } else {
4842 // Load with slow path based read barrier.
4843 // Save the value of `out` into `maybe_temp` before overwriting it
4844 // in the following move operation, as we will need it for the
4845 // read barrier below.
4846 __ Mov(temp_reg, out_reg);
4847 // /* HeapReference<Object> */ out = *(out + offset)
4848 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4849 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
4850 }
4851 } else {
4852 // Plain load with no read barrier.
4853 // /* HeapReference<Object> */ out = *(out + offset)
4854 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4855 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
4856 }
4857}
4858
4859void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
4860 Location out,
4861 Location obj,
4862 uint32_t offset,
4863 Location maybe_temp) {
4864 Primitive::Type type = Primitive::kPrimNot;
4865 Register out_reg = RegisterFrom(out, type);
4866 Register obj_reg = RegisterFrom(obj, type);
4867 if (kEmitCompilerReadBarrier) {
4868 if (kUseBakerReadBarrier) {
4869 // Load with fast path based Baker's read barrier.
4870 Register temp_reg = RegisterFrom(maybe_temp, type);
4871 // /* HeapReference<Object> */ out = *(obj + offset)
4872 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4873 out,
4874 obj_reg,
4875 offset,
4876 temp_reg,
4877 /* needs_null_check */ false,
4878 /* use_load_acquire */ false);
4879 } else {
4880 // Load with slow path based read barrier.
4881 // /* HeapReference<Object> */ out = *(obj + offset)
4882 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
4883 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
4884 }
4885 } else {
4886 // Plain load with no read barrier.
4887 // /* HeapReference<Object> */ out = *(obj + offset)
4888 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
4889 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
4890 }
4891}
4892
4893void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
4894 Location root,
4895 vixl::Register obj,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004896 uint32_t offset,
4897 vixl::Label* fixup_label) {
Roland Levillain44015862016-01-22 11:47:17 +00004898 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
4899 if (kEmitCompilerReadBarrier) {
4900 if (kUseBakerReadBarrier) {
4901 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
4902 // Baker's read barrier are used:
4903 //
4904 // root = obj.field;
4905 // if (Thread::Current()->GetIsGcMarking()) {
4906 // root = ReadBarrier::Mark(root)
4907 // }
4908
4909 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004910 if (fixup_label == nullptr) {
4911 __ Ldr(root_reg, MemOperand(obj, offset));
4912 } else {
4913 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4914 __ Bind(fixup_label);
4915 __ ldr(root_reg, MemOperand(obj, offset));
4916 }
Roland Levillain44015862016-01-22 11:47:17 +00004917 static_assert(
4918 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
4919 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
4920 "have different sizes.");
4921 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
4922 "art::mirror::CompressedReference<mirror::Object> and int32_t "
4923 "have different sizes.");
4924
4925 // Slow path used to mark the GC root `root`.
4926 SlowPathCodeARM64* slow_path =
4927 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root, root);
4928 codegen_->AddSlowPath(slow_path);
4929
4930 MacroAssembler* masm = GetVIXLAssembler();
4931 UseScratchRegisterScope temps(masm);
4932 Register temp = temps.AcquireW();
4933 // temp = Thread::Current()->GetIsGcMarking()
4934 __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64WordSize>().Int32Value()));
4935 __ Cbnz(temp, slow_path->GetEntryLabel());
4936 __ Bind(slow_path->GetExitLabel());
4937 } else {
4938 // GC root loaded through a slow path for read barriers other
4939 // than Baker's.
4940 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004941 if (fixup_label == nullptr) {
4942 __ Add(root_reg.X(), obj.X(), offset);
4943 } else {
4944 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4945 __ Bind(fixup_label);
4946 __ add(root_reg.X(), obj.X(), offset);
4947 }
Roland Levillain44015862016-01-22 11:47:17 +00004948 // /* mirror::Object* */ root = root->Read()
4949 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
4950 }
4951 } else {
4952 // Plain GC root load with no read barrier.
4953 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004954 if (fixup_label == nullptr) {
4955 __ Ldr(root_reg, MemOperand(obj, offset));
4956 } else {
4957 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4958 __ Bind(fixup_label);
4959 __ ldr(root_reg, MemOperand(obj, offset));
4960 }
Roland Levillain44015862016-01-22 11:47:17 +00004961 // Note that GC roots are not affected by heap poisoning, thus we
4962 // do not have to unpoison `root_reg` here.
4963 }
4964}
4965
4966void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
4967 Location ref,
4968 vixl::Register obj,
4969 uint32_t offset,
4970 Register temp,
4971 bool needs_null_check,
4972 bool use_load_acquire) {
4973 DCHECK(kEmitCompilerReadBarrier);
4974 DCHECK(kUseBakerReadBarrier);
4975
4976 // /* HeapReference<Object> */ ref = *(obj + offset)
4977 Location no_index = Location::NoLocation();
4978 GenerateReferenceLoadWithBakerReadBarrier(
4979 instruction, ref, obj, offset, no_index, temp, needs_null_check, use_load_acquire);
4980}
4981
4982void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
4983 Location ref,
4984 vixl::Register obj,
4985 uint32_t data_offset,
4986 Location index,
4987 Register temp,
4988 bool needs_null_check) {
4989 DCHECK(kEmitCompilerReadBarrier);
4990 DCHECK(kUseBakerReadBarrier);
4991
4992 // Array cells are never volatile variables, therefore array loads
4993 // never use Load-Acquire instructions on ARM64.
4994 const bool use_load_acquire = false;
4995
4996 // /* HeapReference<Object> */ ref =
4997 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4998 GenerateReferenceLoadWithBakerReadBarrier(
4999 instruction, ref, obj, data_offset, index, temp, needs_null_check, use_load_acquire);
5000}
5001
5002void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5003 Location ref,
5004 vixl::Register obj,
5005 uint32_t offset,
5006 Location index,
5007 Register temp,
5008 bool needs_null_check,
5009 bool use_load_acquire) {
5010 DCHECK(kEmitCompilerReadBarrier);
5011 DCHECK(kUseBakerReadBarrier);
5012 // If `index` is a valid location, then we are emitting an array
5013 // load, so we shouldn't be using a Load Acquire instruction.
5014 // In other words: `index.IsValid()` => `!use_load_acquire`.
5015 DCHECK(!index.IsValid() || !use_load_acquire);
5016
5017 MacroAssembler* masm = GetVIXLAssembler();
5018 UseScratchRegisterScope temps(masm);
5019
5020 // In slow path based read barriers, the read barrier call is
5021 // inserted after the original load. However, in fast path based
5022 // Baker's read barriers, we need to perform the load of
5023 // mirror::Object::monitor_ *before* the original reference load.
5024 // This load-load ordering is required by the read barrier.
5025 // The fast path/slow path (for Baker's algorithm) should look like:
5026 //
5027 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
5028 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5029 // HeapReference<Object> ref = *src; // Original reference load.
5030 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
5031 // if (is_gray) {
5032 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
5033 // }
5034 //
5035 // Note: the original implementation in ReadBarrier::Barrier is
5036 // slightly more complex as it performs additional checks that we do
5037 // not do here for performance reasons.
5038
5039 Primitive::Type type = Primitive::kPrimNot;
5040 Register ref_reg = RegisterFrom(ref, type);
5041 DCHECK(obj.IsW());
5042 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
5043
5044 // /* int32_t */ monitor = obj->monitor_
5045 __ Ldr(temp, HeapOperand(obj, monitor_offset));
5046 if (needs_null_check) {
5047 MaybeRecordImplicitNullCheck(instruction);
5048 }
5049 // /* LockWord */ lock_word = LockWord(monitor)
5050 static_assert(sizeof(LockWord) == sizeof(int32_t),
5051 "art::LockWord and int32_t have different sizes.");
5052 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
5053 __ Lsr(temp, temp, LockWord::kReadBarrierStateShift);
5054 __ And(temp, temp, Operand(LockWord::kReadBarrierStateMask));
5055 static_assert(
5056 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
5057 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
5058
5059 // Introduce a dependency on the high bits of rb_state, which shall
5060 // be all zeroes, to prevent load-load reordering, and without using
5061 // a memory barrier (which would be more expensive).
5062 // temp2 = rb_state & ~LockWord::kReadBarrierStateMask = 0
5063 Register temp2 = temps.AcquireW();
5064 __ Bic(temp2, temp, Operand(LockWord::kReadBarrierStateMask));
5065 // obj is unchanged by this operation, but its value now depends on
5066 // temp2, which depends on temp.
5067 __ Add(obj, obj, Operand(temp2));
5068 temps.Release(temp2);
5069
5070 // The actual reference load.
5071 if (index.IsValid()) {
5072 static_assert(
5073 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5074 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005075 // /* HeapReference<Object> */ ref =
5076 // *(obj + offset + index * sizeof(HeapReference<Object>))
Roland Levillainca0bf032016-02-09 12:49:18 +00005077 const size_t shift_amount = Primitive::ComponentSizeShift(type);
Roland Levillain44015862016-01-22 11:47:17 +00005078 if (index.IsConstant()) {
Roland Levillainca0bf032016-02-09 12:49:18 +00005079 uint32_t computed_offset = offset + (Int64ConstantFrom(index) << shift_amount);
5080 Load(type, ref_reg, HeapOperand(obj, computed_offset));
Roland Levillain44015862016-01-22 11:47:17 +00005081 } else {
Roland Levillainca0bf032016-02-09 12:49:18 +00005082 temp2 = temps.AcquireW();
Roland Levillain44015862016-01-22 11:47:17 +00005083 __ Add(temp2, obj, offset);
Roland Levillainca0bf032016-02-09 12:49:18 +00005084 Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, shift_amount));
5085 temps.Release(temp2);
Roland Levillain44015862016-01-22 11:47:17 +00005086 }
Roland Levillain44015862016-01-22 11:47:17 +00005087 } else {
5088 // /* HeapReference<Object> */ ref = *(obj + offset)
5089 MemOperand field = HeapOperand(obj, offset);
5090 if (use_load_acquire) {
5091 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
5092 } else {
5093 Load(type, ref_reg, field);
5094 }
5095 }
5096
5097 // Object* ref = ref_addr->AsMirrorPtr()
5098 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
5099
5100 // Slow path used to mark the object `ref` when it is gray.
5101 SlowPathCodeARM64* slow_path =
5102 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref, ref);
5103 AddSlowPath(slow_path);
5104
5105 // if (rb_state == ReadBarrier::gray_ptr_)
5106 // ref = ReadBarrier::Mark(ref);
5107 __ Cmp(temp, ReadBarrier::gray_ptr_);
5108 __ B(eq, slow_path->GetEntryLabel());
5109 __ Bind(slow_path->GetExitLabel());
5110}
5111
5112void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
5113 Location out,
5114 Location ref,
5115 Location obj,
5116 uint32_t offset,
5117 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005118 DCHECK(kEmitCompilerReadBarrier);
5119
Roland Levillain44015862016-01-22 11:47:17 +00005120 // Insert a slow path based read barrier *after* the reference load.
5121 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005122 // If heap poisoning is enabled, the unpoisoning of the loaded
5123 // reference will be carried out by the runtime within the slow
5124 // path.
5125 //
5126 // Note that `ref` currently does not get unpoisoned (when heap
5127 // poisoning is enabled), which is alright as the `ref` argument is
5128 // not used by the artReadBarrierSlow entry point.
5129 //
5130 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5131 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
5132 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
5133 AddSlowPath(slow_path);
5134
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005135 __ B(slow_path->GetEntryLabel());
5136 __ Bind(slow_path->GetExitLabel());
5137}
5138
Roland Levillain44015862016-01-22 11:47:17 +00005139void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5140 Location out,
5141 Location ref,
5142 Location obj,
5143 uint32_t offset,
5144 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005145 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005146 // Baker's read barriers shall be handled by the fast path
5147 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
5148 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005149 // If heap poisoning is enabled, unpoisoning will be taken care of
5150 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005151 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005152 } else if (kPoisonHeapReferences) {
5153 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
5154 }
5155}
5156
Roland Levillain44015862016-01-22 11:47:17 +00005157void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5158 Location out,
5159 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005160 DCHECK(kEmitCompilerReadBarrier);
5161
Roland Levillain44015862016-01-22 11:47:17 +00005162 // Insert a slow path based read barrier *after* the GC root load.
5163 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005164 // Note that GC roots are not affected by heap poisoning, so we do
5165 // not need to do anything special for this here.
5166 SlowPathCodeARM64* slow_path =
5167 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
5168 AddSlowPath(slow_path);
5169
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005170 __ B(slow_path->GetEntryLabel());
5171 __ Bind(slow_path->GetExitLabel());
5172}
5173
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005174void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
5175 LocationSummary* locations =
5176 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5177 locations->SetInAt(0, Location::RequiresRegister());
5178 locations->SetOut(Location::RequiresRegister());
5179}
5180
5181void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
5182 LocationSummary* locations = instruction->GetLocations();
5183 uint32_t method_offset = 0;
Vladimir Markoa1de9182016-02-25 11:37:38 +00005184 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005185 method_offset = mirror::Class::EmbeddedVTableEntryOffset(
5186 instruction->GetIndex(), kArm64PointerSize).SizeValue();
5187 } else {
5188 method_offset = mirror::Class::EmbeddedImTableEntryOffset(
5189 instruction->GetIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value();
5190 }
5191 __ Ldr(XRegisterFrom(locations->Out()),
5192 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
5193}
5194
5195
5196
Alexandre Rames67555f72014-11-18 10:55:16 +00005197#undef __
5198#undef QUICK_ENTRY_POINT
5199
Alexandre Rames5319def2014-10-23 10:03:10 +01005200} // namespace arm64
5201} // namespace art