/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
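// As a rough worked example at the threshold itself (7 entries), using the estimates above:
// the compare/jump sequence costs about 1.5 * 7 + 3 = 13.5 instructions, while the jump table
// costs 7 instructions plus 7 int32 literals, i.e. about 14 words of code/data, so 7 is
// approximately the break-even point between the two strategies.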

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
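
// For example, for kCondLT with gt_bias the mapping above yields `cc`, which is not taken for
// unordered (NaN) inputs, so a NaN operand effectively compares as "greater"; without gt_bias
// it yields `lt`, which (as the /* unordered */ annotations indicate) is also taken for
// unordered inputs, so a NaN operand then effectively compares as "less".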
117
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000118Location ARM64ReturnLocation(Primitive::Type return_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000119 // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
120 // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
121 // but we use the exact registers for clarity.
122 if (return_type == Primitive::kPrimFloat) {
123 return LocationFrom(s0);
124 } else if (return_type == Primitive::kPrimDouble) {
125 return LocationFrom(d0);
126 } else if (return_type == Primitive::kPrimLong) {
127 return LocationFrom(x0);
Nicolas Geoffray925e5622015-06-03 12:23:32 +0100128 } else if (return_type == Primitive::kPrimVoid) {
129 return Location::NoLocation();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000130 } else {
131 return LocationFrom(w0);
132 }
133}
134
Alexandre Rames5319def2014-10-23 10:03:10 +0100135Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000136 return ARM64ReturnLocation(return_type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100137}
138
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100139// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
140#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700141#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()
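
// With these definitions, the slow paths below can simply write, e.g., `__ Bind(GetEntryLabel());`,
// which expands to a call on the VIXL macro-assembler obtained from the `codegen` in scope.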

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating-point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit,
                         vixl::aarch64::Register bss_entry_temp = vixl::aarch64::Register(),
                         vixl::aarch64::Label* bss_entry_adrp_label = nullptr)
      : SlowPathCodeARM64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_entry_temp_(bss_entry_temp),
        bss_entry_adrp_label_(bss_entry_adrp_label) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    constexpr bool call_saves_everything_except_r0_ip0 = (!kUseReadBarrier || kUseBakerReadBarrier);
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the page address of
    // the entry which is in a scratch register. Make sure it's not used for saving/restoring
    // registers. Exclude the scratch register also for non-Baker read barrier for simplicity.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    if (is_load_class_bss_entry) {
      // This temp is a scratch register.
      DCHECK(bss_entry_temp_.IsValid());
      temps.Exclude(bss_entry_temp_);
    }

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    if (is_load_class_bss_entry) {
      DCHECK(out.IsValid());
      const DexFile& dex_file = cls_->GetDexFile();
      if (call_saves_everything_except_r0_ip0) {
        // The class entry page address was preserved in bss_entry_temp_ thanks to kSaveEverything.
      } else {
        // For non-Baker read barrier, we need to re-calculate the address of the class entry page.
        bss_entry_adrp_label_ = arm64_codegen->NewBssEntryTypePatch(dex_file, type_index);
        arm64_codegen->EmitAdrpPlaceholder(bss_entry_adrp_label_, bss_entry_temp_);
      }
      vixl::aarch64::Label* strp_label =
          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index, bss_entry_adrp_label_);
      {
        SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
        __ Bind(strp_label);
        __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
               MemOperand(bss_entry_temp_, /* offset placeholder */ 0));
      }
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // For HLoadClass/kBssEntry, the temp register and the label of the ADRP where it was loaded.
  vixl::aarch64::Register bss_entry_temp_;
  vixl::aarch64::Label* bss_entry_adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadStringSlowPathARM64(HLoadString* instruction, Register temp, vixl::aarch64::Label* adrp_label)
      : SlowPathCodeARM64(instruction),
        temp_(temp),
        adrp_label_(adrp_label) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // temp_ is a scratch register. Make sure it's not used for saving/restoring registers.
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    temps.Exclude(temp_);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    const DexFile& dex_file = instruction_->AsLoadString()->GetDexFile();
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // The string entry page address was preserved in temp_ thanks to kSaveEverything.
    } else {
      // For non-Baker read barrier, we need to re-calculate the address of the string entry page.
      adrp_label_ = arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index);
      arm64_codegen->EmitAdrpPlaceholder(adrp_label_, temp_);
    }
    vixl::aarch64::Label* strp_label =
        arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label_);
    {
      SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
      __ Bind(strp_label);
      __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
             MemOperand(temp_, /* offset placeholder */ 0));
    }

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  const Register temp_;
  vixl::aarch64::Label* adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // space in the underlying code buffer and that the jump table has been generated with the
  // right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
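
// A sketch of how the table emitted above is expected to be consumed by the packed-switch
// dispatch code (generated elsewhere in this file): each entry is an int32 offset relative to
// `table_start_`, so the dispatcher roughly does
//
//   Adr  x_table_base, table_start_
//   Ldr  w_offset, [x_table_base, w_index, UXTW #2]
//   Add  x_target, x_table_base, w_offset, SXTW
//   Br   x_target
//
// (Instruction selection details may differ; this is only an illustration of how the relative
// offsets placed above are meant to be used.)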

// Abstract base class for read barrier slow paths marking a reference
// `ref`.
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked.
class ReadBarrierMarkSlowPathBaseARM64 : public SlowPathCodeARM64 {
 protected:
  ReadBarrierMarkSlowPathBaseARM64(HInstruction* instruction, Location ref, Location entrypoint)
      : SlowPathCodeARM64(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathBaseARM64"; }

  // Generate assembly code calling the read barrier marking runtime
  // entry point (ReadBarrierMarkRegX).
  void GenerateReadBarrierMarkRuntimeCall(CodeGenerator* codegen) {
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
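      // Note: this assumes the per-register ReadBarrierMarkRegX entrypoints are laid out
      // contiguously in the Thread's entrypoint table, so the offset of the entrypoint for
      // `ref_`'s register can be computed from the register code.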
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
  }

  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathBaseARM64);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking.
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is when the decision to mark is based on whether the GC is marking.
class ReadBarrierMarkSlowPathARM64 : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction,
                               Location ref,
                               Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    GenerateReadBarrierMarkRuntimeCall(codegen);
    __ B(GetExitLabel());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). The field `obj.field` in the object `obj` holding
// this reference does not get updated by this slow path after marking
// (see LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
// below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked.
class LoadReferenceWithBakerReadBarrierSlowPathARM64 : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  LoadReferenceWithBakerReadBarrierSlowPathARM64(HInstruction* instruction,
                                                 Location ref,
                                                 Register obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 size_t scale_factor,
                                                 bool needs_null_check,
                                                 bool use_load_acquire,
                                                 Register temp,
                                                 Location entrypoint)
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        use_load_acquire_(use_load_acquire),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(obj_.IsW());
    DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    // Temporary register `temp_`, used to store the lock word, must
    // not be IP0 nor IP1, as we may use them to emit the reference
    // load (in the call to GenerateRawReferenceLoad below), and we
    // need the lock word to still be in `temp_` after the reference
    // load.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    DCHECK_NE(LocationFrom(temp_).reg(), IP1);

    __ Bind(GetEntryLabel());

    // When using MaybeGenerateReadBarrierSlow, the read barrier call is
    // inserted after the original load. However, in fast path based
    // Baker's read barriers, we need to perform the load of
    // mirror::Object::monitor_ *before* the original reference load.
    // This load-load ordering is required by the read barrier.
    // The fast path/slow path (for Baker's algorithm) should look like:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //   }
    //
    // Note: the original implementation in ReadBarrier::Barrier is
    // slightly more complex as it performs additional checks that we do
    // not do here for performance reasons.

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including rb_state,
    // to prevent load-load reordering, and without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
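    // (The Add below adds zero: the 32-bit Ldr above zero-extends into the X register, so
    // `temp_.X() >> 32` is always 0; its only effect is to make the address of the upcoming
    // reference load depend on the lock word load.)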
    __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->GenerateRawReferenceLoad(instruction_,
                                            ref_,
                                            obj_,
                                            offset_,
                                            index_,
                                            scale_factor_,
                                            /* needs_null_check */ false,
                                            use_load_acquire_);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());
    GenerateReadBarrierMarkRuntimeCall(codegen);

    __ B(GetExitLabel());
  }

 private:
  // The register containing the object holding the marked object reference field.
  Register obj_;
  // The offset, index and scale factor to access the reference in `obj_`.
  uint32_t offset_;
  Location index_;
  size_t scale_factor_;
  // Is a null check required?
  bool needs_null_check_;
  // Should this reference load use Load-Acquire semantics?
  bool use_load_acquire_;
  // A temporary register used to hold the lock word of `obj_`.
  Register temp_;

  DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierSlowPathARM64);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). If needed, this slow path also atomically updates
// the field `obj.field` in the object `obj` holding this reference
// after marking (contrary to
// LoadReferenceWithBakerReadBarrierSlowPathARM64 above, which never
// tries to update `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked.
class LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
    : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t offset,
                                                               Location index,
                                                               size_t scale_factor,
                                                               bool needs_null_check,
                                                               bool use_load_acquire,
                                                               Register temp,
                                                               Location entrypoint)
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        use_load_acquire_(use_load_acquire),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = WRegisterFrom(ref_);
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(obj_.IsW());
    DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());

    // This slow path is only used by the UnsafeCASObject intrinsic at the moment.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK_EQ(offset_, 0u);
    DCHECK_EQ(scale_factor_, 0u);
    DCHECK_EQ(use_load_acquire_, false);
    // The location of the offset of the marked reference field within `obj_`.
    Location field_offset = index_;
    DCHECK(field_offset.IsRegister()) << field_offset;

    // Temporary register `temp_`, used to store the lock word, must
    // not be IP0 nor IP1, as we may use them to emit the reference
    // load (in the call to GenerateRawReferenceLoad below), and we
    // need the lock word to still be in `temp_` after the reference
    // load.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    DCHECK_NE(LocationFrom(temp_).reg(), IP1);

    __ Bind(GetEntryLabel());

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including rb_state,
    // to prevent load-load reordering, and without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
    __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->GenerateRawReferenceLoad(instruction_,
                                            ref_,
                                            obj_,
                                            offset_,
                                            index_,
                                            scale_factor_,
                                            /* needs_null_check */ false,
                                            use_load_acquire_);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());

    // Save the old value of the reference before marking it.
    // Note that we cannot use IP to save the old reference, as IP is
    // used internally by the ReadBarrierMarkRegX entry point, and we
    // need the old reference after the call to that entry point.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    __ Mov(temp_.W(), ref_reg);

    GenerateReadBarrierMarkRuntimeCall(codegen);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LDXR/CMP/BNE sequence of instructions in the compare-and-set
    // (CAS) operation below would abort the CAS, leaving the field
    // as-is.
    __ Cmp(temp_.W(), ref_reg);
    __ B(eq, GetExitLabel());

1036 // Update the holder's field atomically. This may fail if the
1037 // mutator updates before us, but it's OK. This is achieved
1038 // using a strong compare-and-set (CAS) operation with relaxed
1039 // memory synchronization ordering, where the expected value is
1040 // the old reference and the desired value is the new reference.
1041
1042 MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
1043 UseScratchRegisterScope temps(masm);
1044
1045 // Convenience aliases.
1046 Register base = obj_.W();
Roland Levillain54f869e2017-03-06 13:54:11 +00001047 Register offset = XRegisterFrom(field_offset);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001048 Register expected = temp_.W();
1049 Register value = ref_reg;
1050 Register tmp_ptr = temps.AcquireX(); // Pointer to actual memory.
1051 Register tmp_value = temps.AcquireW(); // Value in memory.
1052
1053 __ Add(tmp_ptr, base.X(), Operand(offset));
1054
1055 if (kPoisonHeapReferences) {
1056 arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
1057 if (value.Is(expected)) {
1058 // Do not poison `value`, as it is the same register as
1059 // `expected`, which has just been poisoned.
1060 } else {
1061 arm64_codegen->GetAssembler()->PoisonHeapReference(value);
1062 }
1063 }
1064
1065 // do {
1066 // tmp_value = [tmp_ptr] - expected;
1067 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
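  //
  // Added note on the ARMv8 idiom used below: LDXR takes an exclusive
  // monitor on [tmp_ptr]; STXR stores only while that monitor is still held
  // and writes 0 to `tmp_value` on success (1 on failure, hence the CBNZ
  // retry). When the comparison fails we branch out and CLREX simply drops
  // the monitor.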
1068
Roland Levillain24a4d112016-10-26 13:10:46 +01001069 vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001070 __ Bind(&loop_head);
1071 __ Ldxr(tmp_value, MemOperand(tmp_ptr));
1072 __ Cmp(tmp_value, expected);
Roland Levillain24a4d112016-10-26 13:10:46 +01001073 __ B(&comparison_failed, ne);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001074 __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
1075 __ Cbnz(tmp_value, &loop_head);
Roland Levillain24a4d112016-10-26 13:10:46 +01001076 __ B(&exit_loop);
1077 __ Bind(&comparison_failed);
1078 __ Clrex();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001079 __ Bind(&exit_loop);
1080
1081 if (kPoisonHeapReferences) {
1082 arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
1083 if (value.Is(expected)) {
1084 // Do not unpoison `value`, as it is the same register as
1085 // `expected`, which has just been unpoisoned.
1086 } else {
1087 arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
1088 }
1089 }
1090
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001091 __ B(GetExitLabel());
1092 }
1093
1094 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001095 // The register containing the object holding the marked object reference field.
1096 const Register obj_;
Roland Levillain54f869e2017-03-06 13:54:11 +00001097 // The offset, index and scale factor to access the reference in `obj_`.
1098 uint32_t offset_;
1099 Location index_;
1100 size_t scale_factor_;
1101 // Is a null check required?
1102 bool needs_null_check_;
1103 // Should this reference load use Load-Acquire semantics?
1104 bool use_load_acquire_;
1105 // A temporary register used to hold the lock word of `obj_`; and
1106 // also to hold the original reference value, when the reference is
1107 // marked.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001108 const Register temp_;
1109
Roland Levillain54f869e2017-03-06 13:54:11 +00001110 DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001111};
1112
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001113// Slow path generating a read barrier for a heap reference.
1114class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
1115 public:
1116 ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
1117 Location out,
1118 Location ref,
1119 Location obj,
1120 uint32_t offset,
1121 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +00001122 : SlowPathCodeARM64(instruction),
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001123 out_(out),
1124 ref_(ref),
1125 obj_(obj),
1126 offset_(offset),
1127 index_(index) {
1128 DCHECK(kEmitCompilerReadBarrier);
1129 // If `obj` is equal to `out` or `ref`, it means the initial object
1130 // has been overwritten by (or after) the heap object reference load
1131 // to be instrumented, e.g.:
1132 //
1133 // __ Ldr(out, HeapOperand(out, class_offset);
Roland Levillain44015862016-01-22 11:47:17 +00001134 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001135 //
1136 // In that case, we have lost the information about the original
1137 // object, and the emitted read barrier cannot work properly.
1138 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
1139 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
1140 }
1141
1142 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1143 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1144 LocationSummary* locations = instruction_->GetLocations();
1145 Primitive::Type type = Primitive::kPrimNot;
1146 DCHECK(locations->CanCall());
1147 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain3d312422016-06-23 13:53:42 +01001148 DCHECK(instruction_->IsInstanceFieldGet() ||
1149 instruction_->IsStaticFieldGet() ||
1150 instruction_->IsArrayGet() ||
1151 instruction_->IsInstanceOf() ||
1152 instruction_->IsCheckCast() ||
Andreas Gamped9911ee2017-03-27 13:27:24 -07001153 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain44015862016-01-22 11:47:17 +00001154 << "Unexpected instruction in read barrier for heap reference slow path: "
1155 << instruction_->DebugName();
Roland Levillain19c54192016-11-04 13:44:09 +00001156 // The read barrier instrumentation of object ArrayGet
1157 // instructions does not support the HIntermediateAddress
1158 // instruction.
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001159 DCHECK(!(instruction_->IsArrayGet() &&
Artem Serov328429f2016-07-06 16:23:04 +01001160 instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001161
1162 __ Bind(GetEntryLabel());
1163
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001164 SaveLiveRegisters(codegen, locations);
1165
1166 // We may have to change the index's value, but as `index_` is a
1167 // constant member (like other "inputs" of this slow path),
1168 // introduce a copy of it, `index`.
1169 Location index = index_;
1170 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +01001171 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001172 if (instruction_->IsArrayGet()) {
1173 // Compute the actual memory offset and store it in `index`.
1174 Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
1175 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
1176 if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
1177 // We are about to change the value of `index_reg` (see the
1178 // calls to vixl::MacroAssembler::Lsl and
1179 // vixl::MacroAssembler::Mov below), but it has
1180 // not been saved by the previous call to
1181 // art::SlowPathCode::SaveLiveRegisters, as it is a
1182 // callee-save register --
1183 // art::SlowPathCode::SaveLiveRegisters does not consider
1184 // callee-save registers, as it has been designed with the
1185 // assumption that callee-save registers are supposed to be
1186 // handled by the called function. So, as a callee-save
1187 // register, `index_reg` _would_ eventually be saved onto
1188 // the stack, but it would be too late: we would have
1189 // changed its value earlier. Therefore, we manually save
1190 // it here into another freely available register,
1191 // `free_reg`, chosen of course among the caller-save
1192 // registers (as a callee-save `free_reg` register would
1193 // exhibit the same problem).
1194 //
1195 // Note we could have requested a temporary register from
1196 // the register allocator instead; but we prefer not to, as
1197 // this is a slow path, and we know we can find a
1198 // caller-save register that is available.
1199 Register free_reg = FindAvailableCallerSaveRegister(codegen);
1200 __ Mov(free_reg.W(), index_reg);
1201 index_reg = free_reg;
1202 index = LocationFrom(index_reg);
1203 } else {
1204 // The initial register stored in `index_` has already been
1205 // saved in the call to art::SlowPathCode::SaveLiveRegisters
1206 // (as it is not a callee-save register), so we can freely
1207 // use it.
1208 }
1209 // Shifting the index value contained in `index_reg` by the scale
1210 // factor (2) cannot overflow in practice, as the runtime is
1211 // unable to allocate object arrays with a size larger than
1212 // 2^26 - 1 (that is, 2^28 - 4 bytes).
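      // For instance (illustrative only): an index of 5 in an object array
      // becomes 5 << 2 = 20 here, and adding `offset_` below yields the byte
      // offset of element 5 within the array object.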
1213 __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
1214 static_assert(
1215 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
1216 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
1217 __ Add(index_reg, index_reg, Operand(offset_));
1218 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001219 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
1220 // intrinsics, `index_` is not shifted by a scale factor of 2
1221 // (as in the case of ArrayGet), as it is actually an offset
1222 // to an object field within an object.
1223 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001224 DCHECK(instruction_->GetLocations()->Intrinsified());
1225 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
1226 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
1227 << instruction_->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001228 DCHECK_EQ(offset_, 0u);
Roland Levillaina7426c62016-08-03 15:02:10 +01001229 DCHECK(index_.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001230 }
1231 }
1232
1233 // We're moving two or three locations to locations that could
1234 // overlap, so we need a parallel move resolver.
1235 InvokeRuntimeCallingConvention calling_convention;
1236 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
1237 parallel_move.AddMove(ref_,
1238 LocationFrom(calling_convention.GetRegisterAt(0)),
1239 type,
1240 nullptr);
1241 parallel_move.AddMove(obj_,
1242 LocationFrom(calling_convention.GetRegisterAt(1)),
1243 type,
1244 nullptr);
1245 if (index.IsValid()) {
1246 parallel_move.AddMove(index,
1247 LocationFrom(calling_convention.GetRegisterAt(2)),
1248 Primitive::kPrimInt,
1249 nullptr);
1250 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1251 } else {
1252 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1253 arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
1254 }
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001255 arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001256 instruction_,
1257 instruction_->GetDexPc(),
1258 this);
1259 CheckEntrypointTypes<
1260 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
1261 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1262
1263 RestoreLiveRegisters(codegen, locations);
1264
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001265 __ B(GetExitLabel());
1266 }
1267
1268 const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }
1269
1270 private:
1271 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001272 size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
1273 size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001274 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
1275 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
1276 return Register(VIXLRegCodeFromART(i), kXRegSize);
1277 }
1278 }
1279 // We shall never fail to find a free caller-save register, as
1280 // there are more than two core caller-save registers on ARM64
1281 // (meaning it is possible to find one which is different from
1282 // `ref` and `obj`).
1283 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
1284 LOG(FATAL) << "Could not find a free register";
1285 UNREACHABLE();
1286 }
1287
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001288 const Location out_;
1289 const Location ref_;
1290 const Location obj_;
1291 const uint32_t offset_;
1292 // An additional location containing an index to an array.
1293 // Only used for HArrayGet and the UnsafeGetObject &
1294 // UnsafeGetObjectVolatile intrinsics.
1295 const Location index_;
1296
1297 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
1298};
1299
1300// Slow path generating a read barrier for a GC root.
1301class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
1302 public:
1303 ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +00001304 : SlowPathCodeARM64(instruction), out_(out), root_(root) {
Roland Levillain44015862016-01-22 11:47:17 +00001305 DCHECK(kEmitCompilerReadBarrier);
1306 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001307
1308 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1309 LocationSummary* locations = instruction_->GetLocations();
1310 Primitive::Type type = Primitive::kPrimNot;
1311 DCHECK(locations->CanCall());
1312 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain44015862016-01-22 11:47:17 +00001313 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
1314 << "Unexpected instruction in read barrier for GC root slow path: "
1315 << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001316
1317 __ Bind(GetEntryLabel());
1318 SaveLiveRegisters(codegen, locations);
1319
1320 InvokeRuntimeCallingConvention calling_convention;
1321 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1322 // The argument of the ReadBarrierForRootSlow is not a managed
1323 // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
1324 // thus we need a 64-bit move here, and we cannot use
1325 //
1326 // arm64_codegen->MoveLocation(
1327 // LocationFrom(calling_convention.GetRegisterAt(0)),
1328 // root_,
1329 // type);
1330 //
1331 // which would emit a 32-bit move, as `type` is a (32-bit wide)
1332 // reference type (`Primitive::kPrimNot`).
1333 __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001334 arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001335 instruction_,
1336 instruction_->GetDexPc(),
1337 this);
1338 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1339 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1340
1341 RestoreLiveRegisters(codegen, locations);
1342 __ B(GetExitLabel());
1343 }
1344
1345 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }
1346
1347 private:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001348 const Location out_;
1349 const Location root_;
1350
1351 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
1352};
1353
Alexandre Rames5319def2014-10-23 10:03:10 +01001354#undef __
1355
1356Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
1357 Location next_location;
1358 if (type == Primitive::kPrimVoid) {
1359 LOG(FATAL) << "Unreachable type " << type;
1360 }
1361
1362 if (Primitive::IsFloatingPointType(type) &&
1363 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001364 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
1365 } else if (!Primitive::IsFloatingPointType(type) &&
1366 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
1367 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
1368 } else {
1369 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Alexandre Rames542361f2015-01-29 16:57:31 +00001370 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
1371 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +01001372 }
1373
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001374 // Space on the stack is reserved for all arguments.
Alexandre Rames542361f2015-01-29 16:57:31 +00001375 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +01001376 return next_location;
1377}
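// Illustration with a hypothetical signature (added commentary): for managed
// arguments (int, double, long), the visitor above hands out a 32-bit core
// argument register for the int, an FP argument register for the double, and
// a 64-bit core argument register for the long; once either register pool is
// exhausted, later arguments fall back to (double) stack slots. Note that
// `stack_index_` advances for every argument (by two slots for 64-bit types),
// so stack space is reserved even for register arguments.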
1378
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001379Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +01001380 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001381}
1382
Serban Constantinescu579885a2015-02-22 20:51:33 +00001383CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
1384 const Arm64InstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +01001385 const CompilerOptions& compiler_options,
1386 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +01001387 : CodeGenerator(graph,
1388 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001389 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +00001390 kNumberOfAllocatableRegisterPairs,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001391 callee_saved_core_registers.GetList(),
1392 callee_saved_fp_registers.GetList(),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001393 compiler_options,
1394 stats),
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001395 block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Zheng Xu3927c8b2015-11-18 17:46:25 +08001396 jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +01001397 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +00001398 instruction_visitor_(graph, this),
Serban Constantinescu579885a2015-02-22 20:51:33 +00001399 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001400 assembler_(graph->GetArena()),
Vladimir Marko58155012015-08-19 12:49:41 +00001401 isa_features_(isa_features),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001402 uint32_literals_(std::less<uint32_t>(),
1403 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +01001404 uint64_literals_(std::less<uint64_t>(),
1405 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001406 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1407 boot_image_string_patches_(StringReferenceValueComparator(),
1408 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1409 pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001410 boot_image_type_patches_(TypeReferenceValueComparator(),
1411 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1412 pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko1998cd02017-01-13 13:02:58 +00001413 type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00001414 jit_string_patches_(StringReferenceValueComparator(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00001415 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1416 jit_class_patches_(TypeReferenceValueComparator(),
1417 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001418 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001419 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001420}
Alexandre Rames5319def2014-10-23 10:03:10 +01001421
Alexandre Rames67555f72014-11-18 10:55:16 +00001422#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +01001423
Zheng Xu3927c8b2015-11-18 17:46:25 +08001424void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001425 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001426 jump_table->EmitTable(this);
1427 }
1428}
1429
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001430void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001431 EmitJumpTables();
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001432 // Ensure we emit the literal pool.
1433 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +00001434
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001435 CodeGenerator::Finalize(allocator);
1436}
1437
Zheng Xuad4450e2015-04-17 18:48:56 +08001438void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1439 // Note: There are 6 kinds of moves:
1440 // 1. constant -> GPR/FPR (non-cycle)
1441 // 2. constant -> stack (non-cycle)
1442 // 3. GPR/FPR -> GPR/FPR
1443 // 4. GPR/FPR -> stack
1444 // 5. stack -> GPR/FPR
1445 // 6. stack -> stack (non-cycle)
1446 // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
1447 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
1448 // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
1449 // dependency.
1450 vixl_temps_.Open(GetVIXLAssembler());
1451}
1452
1453void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1454 vixl_temps_.Close();
1455}
1456
1457Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
1458 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
1459 kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
1460 kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
1461 Location scratch = GetScratchLocation(kind);
1462 if (!scratch.Equals(Location::NoLocation())) {
1463 return scratch;
1464 }
1465 // Allocate from VIXL temp registers.
1466 if (kind == Location::kRegister) {
1467 scratch = LocationFrom(vixl_temps_.AcquireX());
1468 } else {
1469 DCHECK(kind == Location::kFpuRegister);
1470 scratch = LocationFrom(vixl_temps_.AcquireD());
1471 }
1472 AddScratchLocation(scratch);
1473 return scratch;
1474}
1475
1476void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1477 if (loc.IsRegister()) {
1478 vixl_temps_.Release(XRegisterFrom(loc));
1479 } else {
1480 DCHECK(loc.IsFpuRegister());
1481 vixl_temps_.Release(DRegisterFrom(loc));
1482 }
1483 RemoveScratchLocation(loc);
1484}
1485
Alexandre Rames3e69f162014-12-10 10:36:50 +00001486void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001487 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +01001488 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001489}
1490
Alexandre Rames5319def2014-10-23 10:03:10 +01001491void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001492 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001493 __ Bind(&frame_entry_label_);
1494
Serban Constantinescu02164b32014-11-13 14:05:07 +00001495 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1496 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001497 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001498 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001499 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001500 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Artem Serov914d7a82017-02-07 14:33:49 +00001501 {
1502 // Ensure that between load and RecordPcInfo there are no pools emitted.
1503 ExactAssemblyScope eas(GetVIXLAssembler(),
1504 kInstructionSize,
1505 CodeBufferCheckScope::kExactSize);
1506 __ ldr(wzr, MemOperand(temp, 0));
1507 RecordPcInfo(nullptr, 0);
1508 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001509 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001510
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001511 if (!HasEmptyFrame()) {
1512 int frame_size = GetFrameSize();
1513 // Stack layout:
1514 // sp[frame_size - 8] : lr.
1515 // ... : other preserved core registers.
1516 // ... : other preserved fp registers.
1517 // ... : reserved frame space.
1518 // sp[0] : current method.
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001519
1520 // Save the current method if we need it. Note that we do not
1521 // do this in HCurrentMethod, as the instruction might have been removed
1522 // in the SSA graph.
1523 if (RequiresCurrentMethod()) {
1524 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001525 } else {
1526 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001527 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001528 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001529 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1530 frame_size - GetCoreSpillSize());
1531 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1532 frame_size - FrameEntrySpillSize());
Mingyao Yang063fc772016-08-02 11:02:54 -07001533
1534 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1535 // Initialize should_deoptimize flag to 0.
1536 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1537 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1538 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001539 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001540}
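// One possible frame, with illustrative values only: for frame_size == 64, a
// core spill set {x20, lr} (16 bytes) and an FP spill set {d8} (8 bytes), the
// entry sequence above stores the ArtMethod* at sp[0] while claiming the
// frame, spills d8 at sp[40], x20 at sp[48] and lr at sp[56], and leaves
// sp[8..39] as reserved frame space (spill slots and outgoing arguments).
// This assumes registers are spilled at ascending offsets in ascending
// register order, matching the stack layout comment above.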
1541
1542void CodeGeneratorARM64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001543 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001544 if (!HasEmptyFrame()) {
1545 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001546 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1547 frame_size - FrameEntrySpillSize());
1548 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1549 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001550 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001551 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001552 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001553 __ Ret();
1554 GetAssembler()->cfi().RestoreState();
1555 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001556}
1557
Scott Wakeling97c72b72016-06-24 16:19:36 +01001558CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001559 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001560 return CPURegList(CPURegister::kRegister, kXRegSize,
1561 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001562}
1563
Scott Wakeling97c72b72016-06-24 16:19:36 +01001564CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001565 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1566 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001567 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1568 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001569}
1570
Alexandre Rames5319def2014-10-23 10:03:10 +01001571void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1572 __ Bind(GetLabelOf(block));
1573}
1574
Calin Juravle175dc732015-08-25 15:42:32 +01001575void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1576 DCHECK(location.IsRegister());
1577 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1578}
1579
Calin Juravlee460d1d2015-09-29 04:52:17 +01001580void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1581 if (location.IsRegister()) {
1582 locations->AddTemp(location);
1583 } else {
1584 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1585 }
1586}
1587
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001588void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001589 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001590 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001591 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001592 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001593 if (value_can_be_null) {
1594 __ Cbz(value, &done);
1595 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001596 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001597 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001598 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001599 if (value_can_be_null) {
1600 __ Bind(&done);
1601 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001602}
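// Rough sketch of the card mark above (added commentary, not generated code):
//   uint8_t* biased_card_table = /* loaded from tr at CardTableOffset */;
//   biased_card_table[object >> gc::accounting::CardTable::kCardShift] =
//       static_cast<uint8_t>(reinterpret_cast<uintptr_t>(biased_card_table));
// The STRB stores the low byte of the biased card table base, which the
// runtime is assumed to arrange so that it equals the dirty-card value; one
// register thus serves both as the table base and as the value written (see
// gc::accounting::CardTable for the exact invariant).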
1603
David Brazdil58282f42016-01-14 12:45:10 +00001604void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001605 // Blocked core registers:
1606 // lr : Runtime reserved.
1607 // tr : Runtime reserved.
1608 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1609 // ip1 : VIXL core temp.
1610 // ip0 : VIXL core temp.
1611 //
1612 // Blocked fp registers:
1613 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001614 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1615 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001616 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001617 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001618 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001619
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001620 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001621 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001622 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001623 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001624
David Brazdil58282f42016-01-14 12:45:10 +00001625 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001626 // Stubs do not save callee-save floating point registers. If the graph
1627 // is debuggable, we need to deal with these registers differently. For
1628 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001629 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1630 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001631 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001632 }
1633 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001634}
1635
Alexandre Rames3e69f162014-12-10 10:36:50 +00001636size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1637 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1638 __ Str(reg, MemOperand(sp, stack_index));
1639 return kArm64WordSize;
1640}
1641
1642size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1643 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1644 __ Ldr(reg, MemOperand(sp, stack_index));
1645 return kArm64WordSize;
1646}
1647
1648size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1649 FPRegister reg = FPRegister(reg_id, kDRegSize);
1650 __ Str(reg, MemOperand(sp, stack_index));
1651 return kArm64WordSize;
1652}
1653
1654size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1655 FPRegister reg = FPRegister(reg_id, kDRegSize);
1656 __ Ldr(reg, MemOperand(sp, stack_index));
1657 return kArm64WordSize;
1658}
1659
Alexandre Rames5319def2014-10-23 10:03:10 +01001660void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001661 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001662}
1663
1664void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001665 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001666}
1667
Alexandre Rames67555f72014-11-18 10:55:16 +00001668void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001669 if (constant->IsIntConstant()) {
1670 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1671 } else if (constant->IsLongConstant()) {
1672 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1673 } else if (constant->IsNullConstant()) {
1674 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001675 } else if (constant->IsFloatConstant()) {
1676 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1677 } else {
1678 DCHECK(constant->IsDoubleConstant());
1679 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1680 }
1681}
1682
Alexandre Rames3e69f162014-12-10 10:36:50 +00001683
1684static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1685 DCHECK(constant.IsConstant());
1686 HConstant* cst = constant.GetConstant();
1687 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001688 // Null is mapped to a core W register, which we associate with kPrimInt.
1689 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001690 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1691 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1692 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1693}
1694
Roland Levillain558dea12017-01-27 19:40:44 +00001695// Allocate a scratch register from the VIXL pool, querying first into
1696 // the floating-point register pool, and then the core register
1697// pool. This is essentially a reimplementation of
1698// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
1699// using a different allocation strategy.
1700static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
1701 vixl::aarch64::UseScratchRegisterScope* temps,
1702 int size_in_bits) {
1703 return masm->GetScratchFPRegisterList()->IsEmpty()
1704 ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
1705 : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
1706}
1707
Calin Juravlee460d1d2015-09-29 04:52:17 +01001708void CodeGeneratorARM64::MoveLocation(Location destination,
1709 Location source,
1710 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001711 if (source.Equals(destination)) {
1712 return;
1713 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001714
1715 // A valid move can always be inferred from the destination and source
1716 // locations. When moving from and to a register, the argument type can be
1717 // used to generate 32bit instead of 64bit moves. In debug mode we also
1718 // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001719 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001720
1721 if (destination.IsRegister() || destination.IsFpuRegister()) {
1722 if (unspecified_type) {
1723 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1724 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001725 (src_cst != nullptr && (src_cst->IsIntConstant()
1726 || src_cst->IsFloatConstant()
1727 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001728 // For stack slots and 32bit constants, a 32bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001729 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001730 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001731 // If the source is a double stack slot or a 64bit constant, a 64bit
1732 // type is appropriate. Else the source is a register, and since the
1733 // type has not been specified, we choose a 64bit type to force a 64bit
1734 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001735 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001736 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001737 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001738 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1739 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1740 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001741 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1742 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1743 __ Ldr(dst, StackOperandFrom(source));
1744 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001745 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001746 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001747 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001748 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001749 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001750 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001751 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001752 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1753 ? Primitive::kPrimLong
1754 : Primitive::kPrimInt;
1755 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1756 }
1757 } else {
1758 DCHECK(source.IsFpuRegister());
1759 if (destination.IsRegister()) {
1760 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1761 ? Primitive::kPrimDouble
1762 : Primitive::kPrimFloat;
1763 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1764 } else {
1765 DCHECK(destination.IsFpuRegister());
1766 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001767 }
1768 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001769 } else { // The destination is not a register. It must be a stack slot.
1770 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1771 if (source.IsRegister() || source.IsFpuRegister()) {
1772 if (unspecified_type) {
1773 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001774 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001775 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001776 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001777 }
1778 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001779 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1780 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1781 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001782 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001783 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1784 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001785 UseScratchRegisterScope temps(GetVIXLAssembler());
1786 HConstant* src_cst = source.GetConstant();
1787 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001788 if (src_cst->IsZeroBitPattern()) {
Scott Wakeling79db9972017-01-19 14:08:42 +00001789 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
1790 ? Register(xzr)
1791 : Register(wzr);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001792 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001793 if (src_cst->IsIntConstant()) {
1794 temp = temps.AcquireW();
1795 } else if (src_cst->IsLongConstant()) {
1796 temp = temps.AcquireX();
1797 } else if (src_cst->IsFloatConstant()) {
1798 temp = temps.AcquireS();
1799 } else {
1800 DCHECK(src_cst->IsDoubleConstant());
1801 temp = temps.AcquireD();
1802 }
1803 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001804 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001805 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001806 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001807 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001808 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001809 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001810 // Use any scratch register (a core or a floating-point one)
1811 // from VIXL scratch register pools as a temporary.
1812 //
1813 // We used to only use the FP scratch register pool, but in some
1814 // rare cases the only register from this pool (D31) would
1815 // already be used (e.g. within a ParallelMove instruction, when
1816 // a move is blocked by another move requiring a scratch FP
1817 // register, which would reserve D31). To prevent this issue, we
1818 // ask for a scratch register of any type (core or FP).
Roland Levillain558dea12017-01-27 19:40:44 +00001819 //
1820 // Also, we start by asking for a FP scratch register first, as the
1821 // demand of scratch core registers is higher. This is why we
1822 // use AcquireFPOrCoreCPURegisterOfSize instead of
1823 // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
1824 // allocates core scratch registers first.
1825 CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
1826 GetVIXLAssembler(),
1827 &temps,
1828 (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001829 __ Ldr(temp, StackOperandFrom(source));
1830 __ Str(temp, StackOperandFrom(destination));
1831 }
1832 }
1833}
1834
1835void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001836 CPURegister dst,
1837 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001838 switch (type) {
1839 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001840 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001841 break;
1842 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001843 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001844 break;
1845 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001846 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001847 break;
1848 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001849 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001850 break;
1851 case Primitive::kPrimInt:
1852 case Primitive::kPrimNot:
1853 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001854 case Primitive::kPrimFloat:
1855 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001856 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001857 __ Ldr(dst, src);
1858 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001859 case Primitive::kPrimVoid:
1860 LOG(FATAL) << "Unreachable type " << type;
1861 }
1862}
1863
Calin Juravle77520bc2015-01-12 18:45:46 +00001864void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001865 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001866 const MemOperand& src,
1867 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001868 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001869 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001870 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001871 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001872
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001873 DCHECK(!src.IsPreIndex());
1874 DCHECK(!src.IsPostIndex());
1875
1876 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001877 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Artem Serov914d7a82017-02-07 14:33:49 +00001878 {
1879 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1880 MemOperand base = MemOperand(temp_base);
1881 switch (type) {
1882 case Primitive::kPrimBoolean:
1883 {
1884 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1885 __ ldarb(Register(dst), base);
1886 if (needs_null_check) {
1887 MaybeRecordImplicitNullCheck(instruction);
1888 }
1889 }
1890 break;
1891 case Primitive::kPrimByte:
1892 {
1893 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1894 __ ldarb(Register(dst), base);
1895 if (needs_null_check) {
1896 MaybeRecordImplicitNullCheck(instruction);
1897 }
1898 }
1899 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1900 break;
1901 case Primitive::kPrimChar:
1902 {
1903 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1904 __ ldarh(Register(dst), base);
1905 if (needs_null_check) {
1906 MaybeRecordImplicitNullCheck(instruction);
1907 }
1908 }
1909 break;
1910 case Primitive::kPrimShort:
1911 {
1912 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1913 __ ldarh(Register(dst), base);
1914 if (needs_null_check) {
1915 MaybeRecordImplicitNullCheck(instruction);
1916 }
1917 }
1918 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1919 break;
1920 case Primitive::kPrimInt:
1921 case Primitive::kPrimNot:
1922 case Primitive::kPrimLong:
1923 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
1924 {
1925 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1926 __ ldar(Register(dst), base);
1927 if (needs_null_check) {
1928 MaybeRecordImplicitNullCheck(instruction);
1929 }
1930 }
1931 break;
1932 case Primitive::kPrimFloat:
1933 case Primitive::kPrimDouble: {
1934 DCHECK(dst.IsFPRegister());
1935 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001936
Artem Serov914d7a82017-02-07 14:33:49 +00001937 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1938 {
1939 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1940 __ ldar(temp, base);
1941 if (needs_null_check) {
1942 MaybeRecordImplicitNullCheck(instruction);
1943 }
1944 }
1945 __ Fmov(FPRegister(dst), temp);
1946 break;
Roland Levillain44015862016-01-22 11:47:17 +00001947 }
Artem Serov914d7a82017-02-07 14:33:49 +00001948 case Primitive::kPrimVoid:
1949 LOG(FATAL) << "Unreachable type " << type;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001950 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001951 }
1952}
1953
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001954void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001955 CPURegister src,
1956 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001957 switch (type) {
1958 case Primitive::kPrimBoolean:
1959 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001960 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001961 break;
1962 case Primitive::kPrimChar:
1963 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001964 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001965 break;
1966 case Primitive::kPrimInt:
1967 case Primitive::kPrimNot:
1968 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001969 case Primitive::kPrimFloat:
1970 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001971 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001972 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001973 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001974 case Primitive::kPrimVoid:
1975 LOG(FATAL) << "Unreachable type " << type;
1976 }
1977}
1978
Artem Serov914d7a82017-02-07 14:33:49 +00001979void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
1980 Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001981 CPURegister src,
Artem Serov914d7a82017-02-07 14:33:49 +00001982 const MemOperand& dst,
1983 bool needs_null_check) {
1984 MacroAssembler* masm = GetVIXLAssembler();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001985 UseScratchRegisterScope temps(GetVIXLAssembler());
1986 Register temp_base = temps.AcquireX();
1987
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001988 DCHECK(!dst.IsPreIndex());
1989 DCHECK(!dst.IsPostIndex());
1990
1991 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001992 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001993 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001994 MemOperand base = MemOperand(temp_base);
Artem Serov914d7a82017-02-07 14:33:49 +00001995 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001996 switch (type) {
1997 case Primitive::kPrimBoolean:
1998 case Primitive::kPrimByte:
Artem Serov914d7a82017-02-07 14:33:49 +00001999 {
2000 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2001 __ stlrb(Register(src), base);
2002 if (needs_null_check) {
2003 MaybeRecordImplicitNullCheck(instruction);
2004 }
2005 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002006 break;
2007 case Primitive::kPrimChar:
2008 case Primitive::kPrimShort:
Artem Serov914d7a82017-02-07 14:33:49 +00002009 {
2010 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2011 __ stlrh(Register(src), base);
2012 if (needs_null_check) {
2013 MaybeRecordImplicitNullCheck(instruction);
2014 }
2015 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002016 break;
2017 case Primitive::kPrimInt:
2018 case Primitive::kPrimNot:
2019 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00002020 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00002021 {
2022 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2023 __ stlr(Register(src), base);
2024 if (needs_null_check) {
2025 MaybeRecordImplicitNullCheck(instruction);
2026 }
2027 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002028 break;
2029 case Primitive::kPrimFloat:
2030 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00002031 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002032 Register temp_src;
2033 if (src.IsZero()) {
2034 // The zero register is used to avoid synthesizing zero constants.
2035 temp_src = Register(src);
2036 } else {
2037 DCHECK(src.IsFPRegister());
2038 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
2039 __ Fmov(temp_src, FPRegister(src));
2040 }
Artem Serov914d7a82017-02-07 14:33:49 +00002041 {
2042 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2043 __ stlr(temp_src, base);
2044 if (needs_null_check) {
2045 MaybeRecordImplicitNullCheck(instruction);
2046 }
2047 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002048 break;
2049 }
2050 case Primitive::kPrimVoid:
2051 LOG(FATAL) << "Unreachable type " << type;
2052 }
2053}
2054
Calin Juravle175dc732015-08-25 15:42:32 +01002055void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
2056 HInstruction* instruction,
2057 uint32_t dex_pc,
2058 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01002059 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00002060
2061 __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
2062 {
2063 // Ensure the pc position is recorded immediately after the `blr` instruction.
2064 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
2065 __ blr(lr);
2066 if (EntrypointRequiresStackMap(entrypoint)) {
2067 RecordPcInfo(instruction, dex_pc, slow_path);
2068 }
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00002069 }
Alexandre Rames67555f72014-11-18 10:55:16 +00002070}
2071
Roland Levillaindec8f632016-07-22 17:10:06 +01002072void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
2073 HInstruction* instruction,
2074 SlowPathCode* slow_path) {
2075 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Roland Levillaindec8f632016-07-22 17:10:06 +01002076 __ Ldr(lr, MemOperand(tr, entry_point_offset));
2077 __ Blr(lr);
2078}
2079
Alexandre Rames67555f72014-11-18 10:55:16 +00002080void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01002081 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002082 UseScratchRegisterScope temps(GetVIXLAssembler());
2083 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002084 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
2085
Serban Constantinescu02164b32014-11-13 14:05:07 +00002086 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002087 // TODO(vixl): Let the MacroAssembler handle MemOperand.
2088 __ Add(temp, class_reg, status_offset);
2089 __ Ldar(temp, HeapOperand(temp));
2090 __ Cmp(temp, mirror::Class::kStatusInitialized);
2091 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00002092 __ Bind(slow_path->GetExitLabel());
2093}
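// Informal reading of the check above: the class status values are ordered so
// that anything below kStatusInitialized (not ready, still initializing, or an
// error state) must take the slow path, which is why a single signed "b.lt"
// suffices. The LDAR gives acquire semantics, so once a class is observed as
// initialized, the stores made by the initializing thread are visible as well
// (a sketch of the intent, not a full memory-model argument).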
Alexandre Rames5319def2014-10-23 10:03:10 +01002094
Roland Levillain44015862016-01-22 11:47:17 +00002095void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002096 BarrierType type = BarrierAll;
2097
2098 switch (kind) {
2099 case MemBarrierKind::kAnyAny:
2100 case MemBarrierKind::kAnyStore: {
2101 type = BarrierAll;
2102 break;
2103 }
2104 case MemBarrierKind::kLoadAny: {
2105 type = BarrierReads;
2106 break;
2107 }
2108 case MemBarrierKind::kStoreStore: {
2109 type = BarrierWrites;
2110 break;
2111 }
2112 default:
2113 LOG(FATAL) << "Unexpected memory barrier " << kind;
2114 }
2115 __ Dmb(InnerShareable, type);
2116}
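// Assuming VIXL's usual DMB encodings in the inner shareable domain, the
// mapping above boils down to:
//   kAnyAny, kAnyStore  ->  dmb ish     (full barrier)
//   kLoadAny            ->  dmb ishld   (load barrier)
//   kStoreStore         ->  dmb ishst   (store-store barrier)
// Any other barrier kind is unexpected here and aborts in the default case.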
2117
Serban Constantinescu02164b32014-11-13 14:05:07 +00002118void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
2119 HBasicBlock* successor) {
2120 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002121 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
2122 if (slow_path == nullptr) {
2123 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
2124 instruction->SetSlowPath(slow_path);
2125 codegen_->AddSlowPath(slow_path);
2126 if (successor != nullptr) {
2127 DCHECK(successor->IsLoopHeader());
2128 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
2129 }
2130 } else {
2131 DCHECK_EQ(slow_path->GetSuccessor(), successor);
2132 }
2133
Serban Constantinescu02164b32014-11-13 14:05:07 +00002134 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
2135 Register temp = temps.AcquireW();
2136
Andreas Gampe542451c2016-07-26 09:02:02 -07002137 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002138 if (successor == nullptr) {
2139 __ Cbnz(temp, slow_path->GetEntryLabel());
2140 __ Bind(slow_path->GetReturnLabel());
2141 } else {
2142 __ Cbz(temp, codegen_->GetLabelOf(successor));
2143 __ B(slow_path->GetEntryLabel());
2144 // slow_path will return to GetLabelOf(successor).
2145 }
2146}
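// Informal reading of the suspend check above. The 16-bit thread flags word is
// non-zero whenever the runtime has requested something from this thread
// (suspension, checkpoints, ...), so:
//   - plain check (successor == nullptr): "cbnz flags, slow_path", falling
//     through when no flag is set;
//   - back-edge check: "cbz flags, successor" and otherwise branch to the slow
//     path, which performs the runtime call and then jumps to the successor
//     (the loop header).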
2147
Alexandre Rames5319def2014-10-23 10:03:10 +01002148InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
2149 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08002150 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01002151 assembler_(codegen->GetAssembler()),
2152 codegen_(codegen) {}
2153
2154#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00002155 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01002156
2157#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
2158
2159enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00002160 // Using a base helps identify when we hit such breakpoints.
2161 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01002162#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
2163 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
2164#undef ENUM_UNIMPLEMENTED_INSTRUCTION
2165};
2166
2167#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002168 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01002169 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
2170 } \
2171 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
2172 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
2173 locations->SetOut(Location::Any()); \
2174 }
2175 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
2176#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
2177
2178#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00002179#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01002180
Alexandre Rames67555f72014-11-18 10:55:16 +00002181void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002182 DCHECK_EQ(instr->InputCount(), 2U);
2183 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2184 Primitive::Type type = instr->GetResultType();
2185 switch (type) {
2186 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002187 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01002188 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002189 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002190 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002191 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002192
2193 case Primitive::kPrimFloat:
2194 case Primitive::kPrimDouble:
2195 locations->SetInAt(0, Location::RequiresFpuRegister());
2196 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002197 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002198 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002199
Alexandre Rames5319def2014-10-23 10:03:10 +01002200 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002201 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002202 }
2203}
2204
Alexandre Rames09a99962015-04-15 11:47:56 +01002205void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002206 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2207
2208 bool object_field_get_with_read_barrier =
2209 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01002210 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002211 new (GetGraph()->GetArena()) LocationSummary(instruction,
2212 object_field_get_with_read_barrier ?
2213 LocationSummary::kCallOnSlowPath :
2214 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002215 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002216 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Roland Levillaind0b51832017-01-26 19:04:23 +00002217 // We need a temporary register for the read barrier marking slow
2218 // path in CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier.
2219 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko70e97462016-08-09 11:04:26 +01002220 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002221 locations->SetInAt(0, Location::RequiresRegister());
2222 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2223 locations->SetOut(Location::RequiresFpuRegister());
2224 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002225 // The output overlaps for an object field get when read barriers
2226 // are enabled: we do not want the load to overwrite the object's
2227 // location, as we need it to emit the read barrier.
2228 locations->SetOut(
2229 Location::RequiresRegister(),
2230 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01002231 }
2232}
2233
2234void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
2235 const FieldInfo& field_info) {
2236 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00002237 LocationSummary* locations = instruction->GetLocations();
2238 Location base_loc = locations->InAt(0);
2239 Location out = locations->Out();
2240 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01002241 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002242 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01002243
Roland Levillain44015862016-01-22 11:47:17 +00002244 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2245 // Object FieldGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002246 // /* HeapReference<Object> */ out = *(base + offset)
2247 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
Roland Levillaind0b51832017-01-26 19:04:23 +00002248 Register temp = WRegisterFrom(locations->GetTemp(0));
Roland Levillain44015862016-01-22 11:47:17 +00002249 // Note that potential implicit null checks are handled in this
2250 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
2251 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2252 instruction,
2253 out,
2254 base,
2255 offset,
2256 temp,
2257 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002258 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00002259 } else {
2260 // General case.
2261 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002262 // Note that a potential implicit null check is handled in this
2263 // CodeGeneratorARM64::LoadAcquire call.
2264 // NB: LoadAcquire will record the pc info if needed.
2265 codegen_->LoadAcquire(
2266 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01002267 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002268 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2269 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002270 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01002271 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01002272 }
Roland Levillain44015862016-01-22 11:47:17 +00002273 if (field_type == Primitive::kPrimNot) {
2274 // If read barriers are enabled, emit read barriers other than
2275 // Baker's using a slow path (and also unpoison the loaded
2276 // reference, if heap poisoning is enabled).
2277 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
2278 }
Roland Levillain4d027112015-07-01 15:41:14 +01002279 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002280}
2281
2282void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
2283 LocationSummary* locations =
2284 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2285 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002286 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
2287 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
2288 } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002289 locations->SetInAt(1, Location::RequiresFpuRegister());
2290 } else {
2291 locations->SetInAt(1, Location::RequiresRegister());
2292 }
2293}
2294
2295void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002296 const FieldInfo& field_info,
2297 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002298 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2299
2300 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002301 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01002302 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01002303 Offset offset = field_info.GetFieldOffset();
2304 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002305
Roland Levillain4d027112015-07-01 15:41:14 +01002306 {
2307 // We use a block to end the scratch scope before the write barrier, thus
2308 // freeing the temporary registers so they can be used in `MarkGCCard`.
2309 UseScratchRegisterScope temps(GetVIXLAssembler());
2310
2311 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
2312 DCHECK(value.IsW());
2313 Register temp = temps.AcquireW();
2314 __ Mov(temp, value.W());
2315 GetAssembler()->PoisonHeapReference(temp.W());
2316 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01002317 }
Roland Levillain4d027112015-07-01 15:41:14 +01002318
2319 if (field_info.IsVolatile()) {
Artem Serov914d7a82017-02-07 14:33:49 +00002320 codegen_->StoreRelease(
2321 instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
Roland Levillain4d027112015-07-01 15:41:14 +01002322 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002323 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2324 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002325 codegen_->Store(field_type, source, HeapOperand(obj, offset));
2326 codegen_->MaybeRecordImplicitNullCheck(instruction);
2327 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002328 }
2329
2330 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002331 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01002332 }
2333}
2334
Alexandre Rames67555f72014-11-18 10:55:16 +00002335void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002336 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002337
2338 switch (type) {
2339 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002340 case Primitive::kPrimLong: {
2341 Register dst = OutputRegister(instr);
2342 Register lhs = InputRegisterAt(instr, 0);
2343 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01002344 if (instr->IsAdd()) {
2345 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002346 } else if (instr->IsAnd()) {
2347 __ And(dst, lhs, rhs);
2348 } else if (instr->IsOr()) {
2349 __ Orr(dst, lhs, rhs);
2350 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002351 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002352 } else if (instr->IsRor()) {
2353 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002354 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002355 __ Ror(dst, lhs, shift);
2356 } else {
2357          // Ensure the shift distance is in a register of the same size as the result. If
2358          // we are rotating a long and the shift distance originally comes in a w register,
2359          // we do not need to sxtw it for use as an x register, since rotate distances are
2360          // always taken modulo the register size (i.e. masked with reg_bits - 1).
2361 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
2362 }
Alexandre Rames67555f72014-11-18 10:55:16 +00002363 } else {
2364 DCHECK(instr->IsXor());
2365 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01002366 }
2367 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002368 }
2369 case Primitive::kPrimFloat:
2370 case Primitive::kPrimDouble: {
2371 FPRegister dst = OutputFPRegister(instr);
2372 FPRegister lhs = InputFPRegisterAt(instr, 0);
2373 FPRegister rhs = InputFPRegisterAt(instr, 1);
2374 if (instr->IsAdd()) {
2375 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002376 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002377 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002378 } else {
2379 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002380 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002381 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002382 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002383 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00002384 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002385 }
2386}
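// Worked example for the rotate masking above, with hypothetical values:
// rotating a 64-bit register by an immediate of 68 is emitted as a rotation by
// 68 & 63 = 4, matching Long.rotateRight semantics; for 32-bit values the mask
// is 31. When the distance is in a register, ROR (register) already applies
// the same modulo in hardware, so no explicit masking needs to be emitted.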
2387
Serban Constantinescu02164b32014-11-13 14:05:07 +00002388void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
2389 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2390
2391 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2392 Primitive::Type type = instr->GetResultType();
2393 switch (type) {
2394 case Primitive::kPrimInt:
2395 case Primitive::kPrimLong: {
2396 locations->SetInAt(0, Location::RequiresRegister());
2397 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Artem Serov87c97052016-09-23 13:34:31 +01002398 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002399 break;
2400 }
2401 default:
2402 LOG(FATAL) << "Unexpected shift type " << type;
2403 }
2404}
2405
2406void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
2407 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2408
2409 Primitive::Type type = instr->GetType();
2410 switch (type) {
2411 case Primitive::kPrimInt:
2412 case Primitive::kPrimLong: {
2413 Register dst = OutputRegister(instr);
2414 Register lhs = InputRegisterAt(instr, 0);
2415 Operand rhs = InputOperandAt(instr, 1);
2416 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002417 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00002418 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002419 if (instr->IsShl()) {
2420 __ Lsl(dst, lhs, shift_value);
2421 } else if (instr->IsShr()) {
2422 __ Asr(dst, lhs, shift_value);
2423 } else {
2424 __ Lsr(dst, lhs, shift_value);
2425 }
2426 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002427 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002428
2429 if (instr->IsShl()) {
2430 __ Lsl(dst, lhs, rhs_reg);
2431 } else if (instr->IsShr()) {
2432 __ Asr(dst, lhs, rhs_reg);
2433 } else {
2434 __ Lsr(dst, lhs, rhs_reg);
2435 }
2436 }
2437 break;
2438 }
2439 default:
2440 LOG(FATAL) << "Unexpected shift operation type " << type;
2441 }
2442}
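// The masking above implements the Java shift rules, where only the low 5 bits
// (int) or 6 bits (long) of the distance are used. A worked example with
// hypothetical values: for an int, "1 << 33" uses a distance of 33 & 31 = 1 and
// evaluates to 2. Register-specified distances rely on the same implicit
// modulo performed by the LSL/ASR/LSR (register) instructions on AArch64.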
2443
Alexandre Rames5319def2014-10-23 10:03:10 +01002444void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002445 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002446}
2447
2448void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002449 HandleBinaryOp(instruction);
2450}
2451
2452void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
2453 HandleBinaryOp(instruction);
2454}
2455
2456void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
2457 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002458}
2459
Artem Serov7fc63502016-02-09 17:15:29 +00002460void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002461 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
2462 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2463 locations->SetInAt(0, Location::RequiresRegister());
2464 // There is no immediate variant of negated bitwise instructions in AArch64.
2465 locations->SetInAt(1, Location::RequiresRegister());
2466 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2467}
2468
Artem Serov7fc63502016-02-09 17:15:29 +00002469void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002470 Register dst = OutputRegister(instr);
2471 Register lhs = InputRegisterAt(instr, 0);
2472 Register rhs = InputRegisterAt(instr, 1);
2473
2474 switch (instr->GetOpKind()) {
2475 case HInstruction::kAnd:
2476 __ Bic(dst, lhs, rhs);
2477 break;
2478 case HInstruction::kOr:
2479 __ Orn(dst, lhs, rhs);
2480 break;
2481 case HInstruction::kXor:
2482 __ Eon(dst, lhs, rhs);
2483 break;
2484 default:
2485 LOG(FATAL) << "Unreachable";
2486 }
2487}
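// The mapping used above, shown on hypothetical operands:
//   a & ~b  ->  bic dst, a, b
//   a | ~b  ->  orn dst, a, b
//   a ^ ~b  ->  eon dst, a, b
// These forms only exist with a register right-hand side, which is why the
// locations builder requires a register (never a constant) for input 1.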
2488
Anton Kirilov74234da2017-01-13 14:42:47 +00002489void LocationsBuilderARM64::VisitDataProcWithShifterOp(
2490 HDataProcWithShifterOp* instruction) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002491 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
2492 instruction->GetType() == Primitive::kPrimLong);
2493 LocationSummary* locations =
2494 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2495 if (instruction->GetInstrKind() == HInstruction::kNeg) {
2496 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
2497 } else {
2498 locations->SetInAt(0, Location::RequiresRegister());
2499 }
2500 locations->SetInAt(1, Location::RequiresRegister());
2501 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2502}
2503
Anton Kirilov74234da2017-01-13 14:42:47 +00002504void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
2505 HDataProcWithShifterOp* instruction) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002506 Primitive::Type type = instruction->GetType();
2507 HInstruction::InstructionKind kind = instruction->GetInstrKind();
2508 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2509 Register out = OutputRegister(instruction);
2510 Register left;
2511 if (kind != HInstruction::kNeg) {
2512 left = InputRegisterAt(instruction, 0);
2513 }
Anton Kirilov74234da2017-01-13 14:42:47 +00002514 // If this `HDataProcWithShifterOp` was created by merging a type conversion as the
Alexandre Rames8626b742015-11-25 16:28:08 +00002515 // shifter operand operation, the IR generating `right_reg` (input to the type
2516 // conversion) can have a different type from the current instruction's type,
2517 // so we manually indicate the type.
2518 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Alexandre Rames8626b742015-11-25 16:28:08 +00002519 Operand right_operand(0);
2520
Anton Kirilov74234da2017-01-13 14:42:47 +00002521 HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
2522 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002523 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
2524 } else {
Anton Kirilov74234da2017-01-13 14:42:47 +00002525 right_operand = Operand(right_reg,
2526 helpers::ShiftFromOpKind(op_kind),
2527 instruction->GetShiftAmount());
Alexandre Rames8626b742015-11-25 16:28:08 +00002528 }
2529
2530  // Logical binary operations do not support extension operations in the
2531  // operand. Note that VIXL would still cope if one were passed, by generating
2532  // the extension as a separate instruction.
2533 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
2534 DCHECK(!right_operand.IsExtendedRegister() ||
2535 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
2536 kind != HInstruction::kNeg));
2537 switch (kind) {
2538 case HInstruction::kAdd:
2539 __ Add(out, left, right_operand);
2540 break;
2541 case HInstruction::kAnd:
2542 __ And(out, left, right_operand);
2543 break;
2544 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00002545 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00002546 __ Neg(out, right_operand);
2547 break;
2548 case HInstruction::kOr:
2549 __ Orr(out, left, right_operand);
2550 break;
2551 case HInstruction::kSub:
2552 __ Sub(out, left, right_operand);
2553 break;
2554 case HInstruction::kXor:
2555 __ Eor(out, left, right_operand);
2556 break;
2557 default:
2558 LOG(FATAL) << "Unexpected operation kind: " << kind;
2559 UNREACHABLE();
2560 }
2561}
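// A sketch of what the merged "data processing + shifter operand" form buys
// us, with hypothetical registers. Instead of emitting
//   lsl x16, x2, #3
//   add x0, x1, x16
// the merged HIR node lets us emit the single instruction
//   add x0, x1, x2, lsl #3
// Extension forms (sxtw, uxtw, ...) can be merged the same way for add/sub,
// but not for the logical operations or neg, as the DCHECK above verifies.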
2562
Artem Serov328429f2016-07-06 16:23:04 +01002563void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002564 LocationSummary* locations =
2565 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2566 locations->SetInAt(0, Location::RequiresRegister());
2567 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
Artem Serov87c97052016-09-23 13:34:31 +01002568 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002569}
2570
Roland Levillain19c54192016-11-04 13:44:09 +00002571void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002572 __ Add(OutputRegister(instruction),
2573 InputRegisterAt(instruction, 0),
2574 Operand(InputOperandAt(instruction, 1)));
2575}
2576
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002577void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002578 LocationSummary* locations =
2579 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002580 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2581 if (instr->GetOpKind() == HInstruction::kSub &&
2582 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002583 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002584    // Don't allocate a register for the Mneg instruction.
2585 } else {
2586 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2587 Location::RequiresRegister());
2588 }
2589 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2590 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002591 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2592}
2593
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002594void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002595 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002596 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2597 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002598
2599 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2600 // This fixup should be carried out for all multiply-accumulate instructions:
2601 // madd, msub, smaddl, smsubl, umaddl and umsubl.
2602 if (instr->GetType() == Primitive::kPrimLong &&
2603 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2604 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002605 vixl::aarch64::Instruction* prev =
2606 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002607 if (prev->IsLoadOrStore()) {
2608      // Make sure we emit exactly one nop.
Artem Serov914d7a82017-02-07 14:33:49 +00002609 ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002610 __ nop();
2611 }
2612 }
2613
2614 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002615 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002616 __ Madd(res, mul_left, mul_right, accumulator);
2617 } else {
2618 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002619 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002620 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002621 __ Mneg(res, mul_left, mul_right);
2622 } else {
2623 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2624 __ Msub(res, mul_left, mul_right, accumulator);
2625 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002626 }
2627}
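// Illustrative shape of the Cortex-A53 erratum 835769 workaround above
// (hypothetical registers). A 64-bit multiply-accumulate that directly follows
// a memory access may produce a wrong result on affected cores, so when the
// previously emitted instruction is a load or store we pad:
//   ldr  x16, [x17]
//   nop                      // inserted fixup
//   madd x0, x1, x2, x3
// A single nop is sufficient, and it is only emitted when the instruction set
// features report that the fix is required.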
2628
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002629void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002630 bool object_array_get_with_read_barrier =
2631 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002632 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002633 new (GetGraph()->GetArena()) LocationSummary(instruction,
2634 object_array_get_with_read_barrier ?
2635 LocationSummary::kCallOnSlowPath :
2636 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002637 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002638 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Roland Levillain54f869e2017-03-06 13:54:11 +00002639 // We need a temporary register for the read barrier marking slow
2640 // path in CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier.
2641 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko70e97462016-08-09 11:04:26 +01002642 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002643 locations->SetInAt(0, Location::RequiresRegister());
2644 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002645 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2646 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2647 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002648 // The output overlaps in the case of an object array get with
2649 // read barriers enabled: we do not want the move to overwrite the
2650 // array's location, as we need it to emit the read barrier.
2651 locations->SetOut(
2652 Location::RequiresRegister(),
2653 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002654 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002655}
2656
2657void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002658 Primitive::Type type = instruction->GetType();
2659 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002660 LocationSummary* locations = instruction->GetLocations();
2661 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002662 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002663 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002664 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2665 instruction->IsStringCharAt();
Alexandre Ramesd921d642015-04-16 15:07:16 +01002666 MacroAssembler* masm = GetVIXLAssembler();
2667 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002668
Roland Levillain19c54192016-11-04 13:44:09 +00002669 // The read barrier instrumentation of object ArrayGet instructions
2670 // does not support the HIntermediateAddress instruction.
2671 DCHECK(!((type == Primitive::kPrimNot) &&
2672 instruction->GetArray()->IsIntermediateAddress() &&
2673 kEmitCompilerReadBarrier));
2674
Roland Levillain44015862016-01-22 11:47:17 +00002675 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2676 // Object ArrayGet with Baker's read barrier case.
Roland Levillain54f869e2017-03-06 13:54:11 +00002677 Register temp = WRegisterFrom(locations->GetTemp(0));
Roland Levillain44015862016-01-22 11:47:17 +00002678 // Note that a potential implicit null check is handled in the
2679 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2680 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2681 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002682 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002683 // General case.
2684 MemOperand source = HeapOperand(obj);
jessicahandojo05765752016-09-09 19:01:32 -07002685 Register length;
2686 if (maybe_compressed_char_at) {
2687 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2688 length = temps.AcquireW();
Artem Serov914d7a82017-02-07 14:33:49 +00002689 {
2690 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2691 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2692
2693 if (instruction->GetArray()->IsIntermediateAddress()) {
2694 DCHECK_LT(count_offset, offset);
2695 int64_t adjusted_offset =
2696 static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
2697 // Note that `adjusted_offset` is negative, so this will be a LDUR.
2698 __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
2699 } else {
2700 __ Ldr(length, HeapOperand(obj, count_offset));
2701 }
2702 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002703 }
jessicahandojo05765752016-09-09 19:01:32 -07002704 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002705 if (index.IsConstant()) {
jessicahandojo05765752016-09-09 19:01:32 -07002706 if (maybe_compressed_char_at) {
2707 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002708 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2709 "Expecting 0=compressed, 1=uncompressed");
2710 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002711 __ Ldrb(Register(OutputCPURegister(instruction)),
2712 HeapOperand(obj, offset + Int64ConstantFrom(index)));
2713 __ B(&done);
2714 __ Bind(&uncompressed_load);
2715 __ Ldrh(Register(OutputCPURegister(instruction)),
2716 HeapOperand(obj, offset + (Int64ConstantFrom(index) << 1)));
2717 __ Bind(&done);
2718 } else {
2719 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2720 source = HeapOperand(obj, offset);
2721 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002722 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002723 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002724 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain44015862016-01-22 11:47:17 +00002725 // We do not need to compute the intermediate address from the array: the
2726 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002727 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002728 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002729 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002730 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2731 }
2732 temp = obj;
2733 } else {
2734 __ Add(temp, obj, offset);
2735 }
jessicahandojo05765752016-09-09 19:01:32 -07002736 if (maybe_compressed_char_at) {
2737 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002738 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2739 "Expecting 0=compressed, 1=uncompressed");
2740 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002741 __ Ldrb(Register(OutputCPURegister(instruction)),
2742 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2743 __ B(&done);
2744 __ Bind(&uncompressed_load);
2745 __ Ldrh(Register(OutputCPURegister(instruction)),
2746 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2747 __ Bind(&done);
2748 } else {
2749 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2750 }
Roland Levillain44015862016-01-22 11:47:17 +00002751 }
jessicahandojo05765752016-09-09 19:01:32 -07002752 if (!maybe_compressed_char_at) {
Artem Serov914d7a82017-02-07 14:33:49 +00002753 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2754 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
jessicahandojo05765752016-09-09 19:01:32 -07002755 codegen_->Load(type, OutputCPURegister(instruction), source);
2756 codegen_->MaybeRecordImplicitNullCheck(instruction);
2757 }
Roland Levillain44015862016-01-22 11:47:17 +00002758
2759 if (type == Primitive::kPrimNot) {
2760 static_assert(
2761 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2762 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2763 Location obj_loc = locations->InAt(0);
2764 if (index.IsConstant()) {
2765 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2766 } else {
2767 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2768 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002769 }
Roland Levillain4d027112015-07-01 15:41:14 +01002770 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002771}
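// Rough shape of the String.charAt() path above when string compression is
// enabled (hypothetical registers). Bit 0 of the count field encodes the
// compression state (0 = compressed, 1 = uncompressed), so the code tests that
// bit and picks the element size:
//   ldr  w16, [obj, #count_offset]
//   tbnz w16, #0, uncompressed
//   ldrb w0, [base, index]            // compressed: 8-bit characters
//   b    done
// uncompressed:
//   ldrh w0, [base, index, lsl #1]    // regular 16-bit characters
// done: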
2772
Alexandre Rames5319def2014-10-23 10:03:10 +01002773void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2774 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2775 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002776 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002777}
2778
2779void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002780 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002781 vixl::aarch64::Register out = OutputRegister(instruction);
Artem Serov914d7a82017-02-07 14:33:49 +00002782 {
2783 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2784 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2785 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
2786 codegen_->MaybeRecordImplicitNullCheck(instruction);
2787 }
jessicahandojo05765752016-09-09 19:01:32 -07002788  // Mask out the compression flag from the String's array length.
2789 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002790 __ Lsr(out.W(), out.W(), 1u);
jessicahandojo05765752016-09-09 19:01:32 -07002791 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002792}
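// With string compression, the count field packs (length << 1) | flag, with
// the flag in bit 0, so the LSR above recovers the character count. As a
// hypothetical example, a compressed 5-character string stores
// count = (5 << 1) | 0 = 10, and "lsr w0, w0, #1" yields 5. Plain array
// lengths skip the shift entirely.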
2793
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002794void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002795 Primitive::Type value_type = instruction->GetComponentType();
2796
2797 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002798 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2799 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002800 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002801 LocationSummary::kCallOnSlowPath :
2802 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002803 locations->SetInAt(0, Location::RequiresRegister());
2804 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002805 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2806 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2807 } else if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002808 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002809 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002810 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002811 }
2812}
2813
2814void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2815 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002816 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002817 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002818 bool needs_write_barrier =
2819 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002820
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002821 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002822 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002823 CPURegister source = value;
2824 Location index = locations->InAt(1);
2825 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2826 MemOperand destination = HeapOperand(array);
2827 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002828
2829 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002830 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002831 if (index.IsConstant()) {
2832 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2833 destination = HeapOperand(array, offset);
2834 } else {
2835 UseScratchRegisterScope temps(masm);
2836 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002837 if (instruction->GetArray()->IsIntermediateAddress()) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002838 // We do not need to compute the intermediate address from the array: the
2839 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002840 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002841 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002842 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002843 DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
2844 }
2845 temp = array;
2846 } else {
2847 __ Add(temp, array, offset);
2848 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002849 destination = HeapOperand(temp,
2850 XRegisterFrom(index),
2851 LSL,
2852 Primitive::ComponentSizeShift(value_type));
2853 }
Artem Serov914d7a82017-02-07 14:33:49 +00002854 {
2855 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2856 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2857 codegen_->Store(value_type, value, destination);
2858 codegen_->MaybeRecordImplicitNullCheck(instruction);
2859 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002860 } else {
Artem Serov328429f2016-07-06 16:23:04 +01002861 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01002862 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002863 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002864 {
2865 // We use a block to end the scratch scope before the write barrier, thus
2866 // freeing the temporary registers so they can be used in `MarkGCCard`.
2867 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002868 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002869 if (index.IsConstant()) {
2870 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002871 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002872 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002873 destination = HeapOperand(temp,
2874 XRegisterFrom(index),
2875 LSL,
2876 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002877 }
2878
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002879 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2880 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2881 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2882
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002883 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002884 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2885 codegen_->AddSlowPath(slow_path);
2886 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002887 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002888 __ Cbnz(Register(value), &non_zero);
2889 if (!index.IsConstant()) {
2890 __ Add(temp, array, offset);
2891 }
Artem Serov914d7a82017-02-07 14:33:49 +00002892 {
2893 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools
2894 // emitted.
2895 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2896 __ Str(wzr, destination);
2897 codegen_->MaybeRecordImplicitNullCheck(instruction);
2898 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002899 __ B(&done);
2900 __ Bind(&non_zero);
2901 }
2902
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002903 // Note that when Baker read barriers are enabled, the type
2904 // checks are performed without read barriers. This is fine,
2905 // even in the case where a class object is in the from-space
2906 // after the flip, as a comparison involving such a type would
2907 // not produce a false positive; it may of course produce a
2908 // false negative, in which case we would take the ArraySet
2909 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01002910
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002911 Register temp2 = temps.AcquireSameSizeAs(array);
2912 // /* HeapReference<Class> */ temp = array->klass_
Artem Serov914d7a82017-02-07 14:33:49 +00002913 {
2914 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2915 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2916 __ Ldr(temp, HeapOperand(array, class_offset));
2917 codegen_->MaybeRecordImplicitNullCheck(instruction);
2918 }
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002919 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01002920
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002921 // /* HeapReference<Class> */ temp = temp->component_type_
2922 __ Ldr(temp, HeapOperand(temp, component_offset));
2923 // /* HeapReference<Class> */ temp2 = value->klass_
2924 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2925        // If heap poisoning is enabled, there is no need to unpoison `temp`
2926        // or `temp2`, as we are comparing two poisoned references.
2927 __ Cmp(temp, temp2);
2928 temps.Release(temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01002929
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002930 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2931 vixl::aarch64::Label do_put;
2932 __ B(eq, &do_put);
2933 // If heap poisoning is enabled, the `temp` reference has
2934 // not been unpoisoned yet; unpoison it now.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002935 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2936
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002937 // /* HeapReference<Class> */ temp = temp->super_class_
2938 __ Ldr(temp, HeapOperand(temp, super_offset));
2939 // If heap poisoning is enabled, no need to unpoison
2940 // `temp`, as we are comparing against null below.
2941 __ Cbnz(temp, slow_path->GetEntryLabel());
2942 __ Bind(&do_put);
2943 } else {
2944 __ B(ne, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002945 }
2946 }
2947
2948 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002949 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002950 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002951 __ Mov(temp2, value.W());
2952 GetAssembler()->PoisonHeapReference(temp2);
2953 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002954 }
2955
2956 if (!index.IsConstant()) {
2957 __ Add(temp, array, offset);
2958 }
Artem Serov914d7a82017-02-07 14:33:49 +00002959 {
2960 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2961 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2962 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002963
Artem Serov914d7a82017-02-07 14:33:49 +00002964 if (!may_need_runtime_call_for_type_check) {
2965 codegen_->MaybeRecordImplicitNullCheck(instruction);
2966 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002967 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002968 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002969
2970 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2971
2972 if (done.IsLinked()) {
2973 __ Bind(&done);
2974 }
2975
2976 if (slow_path != nullptr) {
2977 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002978 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002979 }
2980}
2981
Alexandre Rames67555f72014-11-18 10:55:16 +00002982void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002983 RegisterSet caller_saves = RegisterSet::Empty();
2984 InvokeRuntimeCallingConvention calling_convention;
2985 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
2986 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
2987 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00002988 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002989 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002990}
2991
2992void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002993 BoundsCheckSlowPathARM64* slow_path =
2994 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002995 codegen_->AddSlowPath(slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00002996 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2997 __ B(slow_path->GetEntryLabel(), hs);
2998}
2999
Alexandre Rames67555f72014-11-18 10:55:16 +00003000void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
3001 LocationSummary* locations =
3002 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3003 locations->SetInAt(0, Location::RequiresRegister());
3004 if (check->HasUses()) {
3005 locations->SetOut(Location::SameAsFirstInput());
3006 }
3007}
3008
3009void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
3010 // We assume the class is not null.
3011 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
3012 check->GetLoadClass(), check, check->GetDexPc(), true);
3013 codegen_->AddSlowPath(slow_path);
3014 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
3015}
3016
Roland Levillain1a653882016-03-18 18:05:57 +00003017static bool IsFloatingPointZeroConstant(HInstruction* inst) {
3018 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
3019 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
3020}
3021
3022void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
3023 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
3024 Location rhs_loc = instruction->GetLocations()->InAt(1);
3025 if (rhs_loc.IsConstant()) {
3026 // 0.0 is the only immediate that can be encoded directly in
3027 // an FCMP instruction.
3028 //
3029 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
3030 // specify that in a floating-point comparison, positive zero
3031 // and negative zero are considered equal, so we can use the
3032 // literal 0.0 for both cases here.
3033 //
3034 // Note however that some methods (Float.equal, Float.compare,
3035 // Float.compareTo, Double.equal, Double.compare,
3036 // Double.compareTo, Math.max, Math.min, StrictMath.max,
3037 // StrictMath.min) consider 0.0 to be (strictly) greater than
3038 // -0.0. So if we ever translate calls to these methods into a
3039 // HCompare instruction, we must handle the -0.0 case with
3040 // care here.
3041 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
3042 __ Fcmp(lhs_reg, 0.0);
3043 } else {
3044 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
3045 }
Roland Levillain7f63c522015-07-13 15:54:55 +00003046}
3047
Serban Constantinescu02164b32014-11-13 14:05:07 +00003048void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003049 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00003050 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
3051 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01003052 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003053 case Primitive::kPrimBoolean:
3054 case Primitive::kPrimByte:
3055 case Primitive::kPrimShort:
3056 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003057 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01003058 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003059 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00003060 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00003061 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3062 break;
3063 }
3064 case Primitive::kPrimFloat:
3065 case Primitive::kPrimDouble: {
3066 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00003067 locations->SetInAt(1,
3068 IsFloatingPointZeroConstant(compare->InputAt(1))
3069 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
3070 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00003071 locations->SetOut(Location::RequiresRegister());
3072 break;
3073 }
3074 default:
3075 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3076 }
3077}
3078
3079void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
3080 Primitive::Type in_type = compare->InputAt(0)->GetType();
3081
3082 // 0 if: left == right
3083 // 1 if: left > right
3084 // -1 if: left < right
3085 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003086 case Primitive::kPrimBoolean:
3087 case Primitive::kPrimByte:
3088 case Primitive::kPrimShort:
3089 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003090 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00003091 case Primitive::kPrimLong: {
3092 Register result = OutputRegister(compare);
3093 Register left = InputRegisterAt(compare, 0);
3094 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003095 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08003096 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
3097 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00003098 break;
3099 }
3100 case Primitive::kPrimFloat:
3101 case Primitive::kPrimDouble: {
3102 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00003103 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003104 __ Cset(result, ne);
3105 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01003106 break;
3107 }
3108 default:
3109 LOG(FATAL) << "Unimplemented compare type " << in_type;
3110 }
3111}
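
// For illustration, a sketch (with hypothetical registers) of the integer path above for an
// HCompare producing w0 from inputs w1 and w2:
//   cmp  w1, w2
//   cset w0, ne       // w0 = 1 if w1 != w2, 0 otherwise
//   cneg w0, w0, lt   // w0 = -w0 if w1 < w2, yielding -1 / 0 / +1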
3112
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003113void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003114 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00003115
3116 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
3117 locations->SetInAt(0, Location::RequiresFpuRegister());
3118 locations->SetInAt(1,
3119 IsFloatingPointZeroConstant(instruction->InputAt(1))
3120 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
3121 : Location::RequiresFpuRegister());
3122 } else {
3123 // Integer cases.
3124 locations->SetInAt(0, Location::RequiresRegister());
3125 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
3126 }
3127
David Brazdilb3e773e2016-01-26 11:28:37 +00003128 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00003129 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01003130 }
3131}
3132
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003133void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003134 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003135 return;
3136 }
3137
3138 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01003139 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00003140 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01003141
Roland Levillain7f63c522015-07-13 15:54:55 +00003142 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00003143 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003144 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00003145 } else {
3146 // Integer cases.
3147 Register lhs = InputRegisterAt(instruction, 0);
3148 Operand rhs = InputOperandAt(instruction, 1);
3149 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003150 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00003151 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003152}
3153
3154#define FOR_EACH_CONDITION_INSTRUCTION(M) \
3155 M(Equal) \
3156 M(NotEqual) \
3157 M(LessThan) \
3158 M(LessThanOrEqual) \
3159 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07003160 M(GreaterThanOrEqual) \
3161 M(Below) \
3162 M(BelowOrEqual) \
3163 M(Above) \
3164 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01003165#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003166void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
3167void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01003168FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00003169#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01003170#undef FOR_EACH_CONDITION_INSTRUCTION
3171
Zheng Xuc6667102015-05-15 16:08:45 +08003172void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3173 DCHECK(instruction->IsDiv() || instruction->IsRem());
3174
3175 LocationSummary* locations = instruction->GetLocations();
3176 Location second = locations->InAt(1);
3177 DCHECK(second.IsConstant());
3178
3179 Register out = OutputRegister(instruction);
3180 Register dividend = InputRegisterAt(instruction, 0);
3181 int64_t imm = Int64FromConstant(second.GetConstant());
3182 DCHECK(imm == 1 || imm == -1);
3183
3184 if (instruction->IsRem()) {
3185 __ Mov(out, 0);
3186 } else {
3187 if (imm == 1) {
3188 __ Mov(out, dividend);
3189 } else {
3190 __ Neg(out, dividend);
3191 }
3192 }
3193}
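
// Note: with a divisor of 1 or -1 the quotient is simply +/-dividend and the remainder is
// always 0, so the sequence above never needs an actual division instruction.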
3194
3195void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
3196 DCHECK(instruction->IsDiv() || instruction->IsRem());
3197
3198 LocationSummary* locations = instruction->GetLocations();
3199 Location second = locations->InAt(1);
3200 DCHECK(second.IsConstant());
3201
3202 Register out = OutputRegister(instruction);
3203 Register dividend = InputRegisterAt(instruction, 0);
3204 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003205 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08003206 int ctz_imm = CTZ(abs_imm);
3207
3208 UseScratchRegisterScope temps(GetVIXLAssembler());
3209 Register temp = temps.AcquireSameSizeAs(out);
3210
3211 if (instruction->IsDiv()) {
3212 __ Add(temp, dividend, abs_imm - 1);
3213 __ Cmp(dividend, 0);
3214 __ Csel(out, temp, dividend, lt);
3215 if (imm > 0) {
3216 __ Asr(out, out, ctz_imm);
3217 } else {
3218 __ Neg(out, Operand(out, ASR, ctz_imm));
3219 }
3220 } else {
3221 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
3222 __ Asr(temp, dividend, bits - 1);
3223 __ Lsr(temp, temp, bits - ctz_imm);
3224 __ Add(out, dividend, temp);
3225 __ And(out, out, abs_imm - 1);
3226 __ Sub(out, out, temp);
3227 }
3228}
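
// Worked example for the sequence above (a sketch, assuming a 32-bit dividend of -7 and
// imm == 4, so abs_imm == 4 and ctz_imm == 2):
//   Div:  temp = -7 + 3 = -4; since the dividend is negative, out = temp; asr #2 then
//         gives -1, i.e. -7 / 4 rounded toward zero.
//   Rem:  temp = -7 asr #31 = -1; lsr #(32 - 2) gives 3; out = -7 + 3 = -4; and-ing with 3
//         gives 0; subtracting temp (3) gives -3, i.e. -7 % 4.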
3229
3230void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3231 DCHECK(instruction->IsDiv() || instruction->IsRem());
3232
3233 LocationSummary* locations = instruction->GetLocations();
3234 Location second = locations->InAt(1);
3235 DCHECK(second.IsConstant());
3236
3237 Register out = OutputRegister(instruction);
3238 Register dividend = InputRegisterAt(instruction, 0);
3239 int64_t imm = Int64FromConstant(second.GetConstant());
3240
3241 Primitive::Type type = instruction->GetResultType();
3242 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3243
3244 int64_t magic;
3245 int shift;
3246 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
3247
3248 UseScratchRegisterScope temps(GetVIXLAssembler());
3249 Register temp = temps.AcquireSameSizeAs(out);
3250
3251 // temp = get_high(dividend * magic)
3252 __ Mov(temp, magic);
3253 if (type == Primitive::kPrimLong) {
3254 __ Smulh(temp, dividend, temp);
3255 } else {
3256 __ Smull(temp.X(), dividend, temp);
3257 __ Lsr(temp.X(), temp.X(), 32);
3258 }
3259
3260 if (imm > 0 && magic < 0) {
3261 __ Add(temp, temp, dividend);
3262 } else if (imm < 0 && magic > 0) {
3263 __ Sub(temp, temp, dividend);
3264 }
3265
3266 if (shift != 0) {
3267 __ Asr(temp, temp, shift);
3268 }
3269
3270 if (instruction->IsDiv()) {
3271 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
3272 } else {
3273 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
3274 // TODO: Strength reduction for msub.
3275 Register temp_imm = temps.AcquireSameSizeAs(out);
3276 __ Mov(temp_imm, imm);
3277 __ Msub(out, temp, temp_imm, dividend);
3278 }
3279}
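
// The sequence above is the classic "magic number" signed division (cf. Hacker's Delight,
// chapter 10): quotient ~= high_half(dividend * magic), corrected by adding or subtracting
// the dividend when `imm` and `magic` have opposite signs, shifted right arithmetically by
// `shift`, with a final `temp - (temp >> reg_size - 1)` that adds 1 for negative results so
// the quotient rounds toward zero. For Rem, the Msub then computes dividend - quotient * imm.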
3280
3281void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3282 DCHECK(instruction->IsDiv() || instruction->IsRem());
3283 Primitive::Type type = instruction->GetResultType();
Calin Juravlec70d1d92017-03-27 18:10:04 -07003284 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
Zheng Xuc6667102015-05-15 16:08:45 +08003285
3286 LocationSummary* locations = instruction->GetLocations();
3287 Register out = OutputRegister(instruction);
3288 Location second = locations->InAt(1);
3289
3290 if (second.IsConstant()) {
3291 int64_t imm = Int64FromConstant(second.GetConstant());
3292
3293 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3294 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3295 } else if (imm == 1 || imm == -1) {
3296 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003297 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08003298 DivRemByPowerOfTwo(instruction);
3299 } else {
3300 DCHECK(imm <= -2 || imm >= 2);
3301 GenerateDivRemWithAnyConstant(instruction);
3302 }
3303 } else {
3304 Register dividend = InputRegisterAt(instruction, 0);
3305 Register divisor = InputRegisterAt(instruction, 1);
3306 if (instruction->IsDiv()) {
3307 __ Sdiv(out, dividend, divisor);
3308 } else {
3309 UseScratchRegisterScope temps(GetVIXLAssembler());
3310 Register temp = temps.AcquireSameSizeAs(out);
3311 __ Sdiv(temp, dividend, divisor);
3312 __ Msub(out, temp, divisor, dividend);
3313 }
3314 }
3315}
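
// For the non-constant divisor path above, the remainder uses the identity
//   rem = dividend - (dividend / divisor) * divisor,
// which is exactly what the single Msub computes from the Sdiv result.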
3316
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003317void LocationsBuilderARM64::VisitDiv(HDiv* div) {
3318 LocationSummary* locations =
3319 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3320 switch (div->GetResultType()) {
3321 case Primitive::kPrimInt:
3322 case Primitive::kPrimLong:
3323 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08003324 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003325 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3326 break;
3327
3328 case Primitive::kPrimFloat:
3329 case Primitive::kPrimDouble:
3330 locations->SetInAt(0, Location::RequiresFpuRegister());
3331 locations->SetInAt(1, Location::RequiresFpuRegister());
3332 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3333 break;
3334
3335 default:
3336 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3337 }
3338}
3339
3340void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
3341 Primitive::Type type = div->GetResultType();
3342 switch (type) {
3343 case Primitive::kPrimInt:
3344 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08003345 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003346 break;
3347
3348 case Primitive::kPrimFloat:
3349 case Primitive::kPrimDouble:
3350 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
3351 break;
3352
3353 default:
3354 LOG(FATAL) << "Unexpected div type " << type;
3355 }
3356}
3357
Alexandre Rames67555f72014-11-18 10:55:16 +00003358void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003359 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003360 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00003361}
3362
3363void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3364 SlowPathCodeARM64* slow_path =
3365 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
3366 codegen_->AddSlowPath(slow_path);
3367 Location value = instruction->GetLocations()->InAt(0);
3368
Alexandre Rames3e69f162014-12-10 10:36:50 +00003369 Primitive::Type type = instruction->GetType();
3370
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003371 if (!Primitive::IsIntegralType(type)) {
3372 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00003373 return;
3374 }
3375
Alexandre Rames67555f72014-11-18 10:55:16 +00003376 if (value.IsConstant()) {
3377 int64_t divisor = Int64ConstantFrom(value);
3378 if (divisor == 0) {
3379 __ B(slow_path->GetEntryLabel());
3380 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00003381 // A division by a non-zero constant is valid. We don't need to perform
3382 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00003383 }
3384 } else {
3385 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3386 }
3387}
3388
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003389void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3390 LocationSummary* locations =
3391 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3392 locations->SetOut(Location::ConstantLocation(constant));
3393}
3394
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003395void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3396 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003397 // Will be generated at use site.
3398}
3399
Alexandre Rames5319def2014-10-23 10:03:10 +01003400void LocationsBuilderARM64::VisitExit(HExit* exit) {
3401 exit->SetLocations(nullptr);
3402}
3403
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003404void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003405}
3406
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003407void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3408 LocationSummary* locations =
3409 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3410 locations->SetOut(Location::ConstantLocation(constant));
3411}
3412
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003413void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003414 // Will be generated at use site.
3415}
3416
David Brazdilfc6a86a2015-06-26 10:33:45 +00003417void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003418 DCHECK(!successor->IsExitBlock());
3419 HBasicBlock* block = got->GetBlock();
3420 HInstruction* previous = got->GetPrevious();
3421 HLoopInformation* info = block->GetLoopInformation();
3422
David Brazdil46e2a392015-03-16 17:31:52 +00003423 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003424 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3425 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3426 return;
3427 }
3428 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3429 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3430 }
3431 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003432 __ B(codegen_->GetLabelOf(successor));
3433 }
3434}
3435
David Brazdilfc6a86a2015-06-26 10:33:45 +00003436void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3437 got->SetLocations(nullptr);
3438}
3439
3440void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3441 HandleGoto(got, got->GetSuccessor());
3442}
3443
3444void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3445 try_boundary->SetLocations(nullptr);
3446}
3447
3448void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3449 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3450 if (!successor->IsExitBlock()) {
3451 HandleGoto(try_boundary, successor);
3452 }
3453}
3454
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003455void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003456 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003457 vixl::aarch64::Label* true_target,
3458 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003459 // FP branching requires both targets to be explicit. If either of the targets
3460 // is nullptr (fallthrough), use and bind `fallthrough_target` instead.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003461 vixl::aarch64::Label fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003462 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003463
David Brazdil0debae72015-11-12 18:37:00 +00003464 if (true_target == nullptr && false_target == nullptr) {
3465 // Nothing to do. The code always falls through.
3466 return;
3467 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003468 // Constant condition, statically compared against "true" (integer value 1).
3469 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003470 if (true_target != nullptr) {
3471 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003472 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003473 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003474 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003475 if (false_target != nullptr) {
3476 __ B(false_target);
3477 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003478 }
David Brazdil0debae72015-11-12 18:37:00 +00003479 return;
3480 }
3481
3482 // The following code generates these patterns:
3483 // (1) true_target == nullptr && false_target != nullptr
3484 // - opposite condition true => branch to false_target
3485 // (2) true_target != nullptr && false_target == nullptr
3486 // - condition true => branch to true_target
3487 // (3) true_target != nullptr && false_target != nullptr
3488 // - condition true => branch to true_target
3489 // - branch to false_target
3490 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003491 // The condition instruction has been materialized; compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003492 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003493 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003494 if (true_target == nullptr) {
3495 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3496 } else {
3497 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3498 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003499 } else {
3500 // The condition instruction has not been materialized; use its inputs as
3501 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003502 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003503
David Brazdil0debae72015-11-12 18:37:00 +00003504 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00003505 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003506 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00003507 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003508 IfCondition opposite_condition = condition->GetOppositeCondition();
3509 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003510 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003511 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003512 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003513 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003514 // Integer cases.
3515 Register lhs = InputRegisterAt(condition, 0);
3516 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003517
3518 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003519 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003520 if (true_target == nullptr) {
3521 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3522 non_fallthrough_target = false_target;
3523 } else {
3524 arm64_cond = ARM64Condition(condition->GetCondition());
3525 non_fallthrough_target = true_target;
3526 }
3527
Aart Bik086d27e2016-01-20 17:02:00 -08003528 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003529 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003530 switch (arm64_cond) {
3531 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003532 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003533 break;
3534 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003535 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003536 break;
3537 case lt:
3538 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003539 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003540 break;
3541 case ge:
3542 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003543 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003544 break;
3545 default:
3546 // Without the `static_cast` the compiler emits an error under
3547 // `-Werror=sign-promo`.
3548 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3549 }
3550 } else {
3551 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003552 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003553 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003554 }
3555 }
David Brazdil0debae72015-11-12 18:37:00 +00003556
3557 // If neither branch falls through (case 3), the conditional branch to `true_target`
3558 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3559 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003560 __ B(false_target);
3561 }
David Brazdil0debae72015-11-12 18:37:00 +00003562
3563 if (fallthrough_target.IsLinked()) {
3564 __ Bind(&fallthrough_target);
3565 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003566}
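
// For illustration, sketches (with a hypothetical w0 holding the compared value) of the
// compare-against-zero shortcuts taken above instead of a cmp + conditional branch pair:
//   x == 0  =>  cbz  w0, <target>
//   x != 0  =>  cbnz w0, <target>
//   x <  0  =>  tbnz w0, #31, <target>   // branch if the sign bit is set
//   x >= 0  =>  tbz  w0, #31, <target>   // branch if the sign bit is clear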
3567
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003568void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
3569 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003570 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003571 locations->SetInAt(0, Location::RequiresRegister());
3572 }
3573}
3574
3575void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003576 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3577 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003578 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3579 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3580 true_target = nullptr;
3581 }
3582 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3583 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3584 false_target = nullptr;
3585 }
David Brazdil0debae72015-11-12 18:37:00 +00003586 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003587}
3588
3589void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
3590 LocationSummary* locations = new (GetGraph()->GetArena())
3591 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01003592 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00003593 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003594 locations->SetInAt(0, Location::RequiresRegister());
3595 }
3596}
3597
3598void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003599 SlowPathCodeARM64* slow_path =
3600 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003601 GenerateTestAndBranch(deoptimize,
3602 /* condition_input_index */ 0,
3603 slow_path->GetEntryLabel(),
3604 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003605}
3606
Mingyao Yang063fc772016-08-02 11:02:54 -07003607void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3608 LocationSummary* locations = new (GetGraph()->GetArena())
3609 LocationSummary(flag, LocationSummary::kNoCall);
3610 locations->SetOut(Location::RequiresRegister());
3611}
3612
3613void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3614 __ Ldr(OutputRegister(flag),
3615 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3616}
3617
David Brazdilc0b601b2016-02-08 14:20:45 +00003618static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3619 return condition->IsCondition() &&
3620 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
3621}
3622
Alexandre Rames880f1192016-06-13 16:04:50 +01003623static inline Condition GetConditionForSelect(HCondition* condition) {
3624 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003625 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3626 : ARM64Condition(cond);
3627}
3628
David Brazdil74eb1b22015-12-14 11:44:01 +00003629void LocationsBuilderARM64::VisitSelect(HSelect* select) {
3630 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01003631 if (Primitive::IsFloatingPointType(select->GetType())) {
3632 locations->SetInAt(0, Location::RequiresFpuRegister());
3633 locations->SetInAt(1, Location::RequiresFpuRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08003634 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames880f1192016-06-13 16:04:50 +01003635 } else {
3636 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3637 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3638 bool is_true_value_constant = cst_true_value != nullptr;
3639 bool is_false_value_constant = cst_false_value != nullptr;
3640 // Ask VIXL whether we should synthesize constants in registers.
3641 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3642 Operand true_op = is_true_value_constant ?
3643 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3644 Operand false_op = is_false_value_constant ?
3645 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3646 bool true_value_in_register = false;
3647 bool false_value_in_register = false;
3648 MacroAssembler::GetCselSynthesisInformation(
3649 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3650 true_value_in_register |= !is_true_value_constant;
3651 false_value_in_register |= !is_false_value_constant;
3652
3653 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3654 : Location::ConstantLocation(cst_true_value));
3655 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3656 : Location::ConstantLocation(cst_false_value));
Donghui Bai426b49c2016-11-08 14:55:38 +08003657 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
David Brazdil74eb1b22015-12-14 11:44:01 +00003658 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003659
David Brazdil74eb1b22015-12-14 11:44:01 +00003660 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3661 locations->SetInAt(2, Location::RequiresRegister());
3662 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003663}
3664
3665void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003666 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003667 Condition csel_cond;
3668
3669 if (IsBooleanValueOrMaterializedCondition(cond)) {
3670 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003671 // Use the condition flags set by the previous instruction.
3672 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003673 } else {
3674 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003675 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003676 }
3677 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003678 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003679 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003680 } else {
3681 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003682 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003683 }
3684
Alexandre Rames880f1192016-06-13 16:04:50 +01003685 if (Primitive::IsFloatingPointType(select->GetType())) {
3686 __ Fcsel(OutputFPRegister(select),
3687 InputFPRegisterAt(select, 1),
3688 InputFPRegisterAt(select, 0),
3689 csel_cond);
3690 } else {
3691 __ Csel(OutputRegister(select),
3692 InputOperandAt(select, 1),
3693 InputOperandAt(select, 0),
3694 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003695 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003696}
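
// For illustration, a sketch (with hypothetical registers) of an integer HSelect whose
// materialized boolean condition is in w2, true value in w1 and false value in w0:
//   cmp  w2, #0
//   csel w3, w1, w0, ne   // w3 = (w2 != 0) ? w1 : w0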
3697
David Srbecky0cf44932015-12-09 14:09:59 +00003698void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3699 new (GetGraph()->GetArena()) LocationSummary(info);
3700}
3701
David Srbeckyd28f4a02016-03-14 17:14:24 +00003702void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3703 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003704}
3705
3706void CodeGeneratorARM64::GenerateNop() {
3707 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003708}
3709
Alexandre Rames5319def2014-10-23 10:03:10 +01003710void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003711 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003712}
3713
3714void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003715 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003716}
3717
3718void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003719 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003720}
3721
3722void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003723 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003724}
3725
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003726// Temp is used for read barrier.
3727static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3728 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003729 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003730 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3731 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3732 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3733 return 1;
3734 }
3735 return 0;
3736}
3737
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003738// The interface case has 3 temps: one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003739// interface pointer, and one for loading the current interface.
3740// The other checks have one temp for loading the object's class.
3741static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3742 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3743 return 3;
3744 }
3745 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003746}
3747
Alexandre Rames67555f72014-11-18 10:55:16 +00003748void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003749 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003750 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003751 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003752 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003753 case TypeCheckKind::kExactCheck:
3754 case TypeCheckKind::kAbstractClassCheck:
3755 case TypeCheckKind::kClassHierarchyCheck:
3756 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003757 call_kind =
3758 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01003759 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003760 break;
3761 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003762 case TypeCheckKind::kUnresolvedCheck:
3763 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003764 call_kind = LocationSummary::kCallOnSlowPath;
3765 break;
3766 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003767
Alexandre Rames67555f72014-11-18 10:55:16 +00003768 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003769 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003770 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003771 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003772 locations->SetInAt(0, Location::RequiresRegister());
3773 locations->SetInAt(1, Location::RequiresRegister());
3774 // The "out" register is used as a temporary, so it overlaps with the inputs.
3775 // Note that TypeCheckSlowPathARM64 uses this register too.
3776 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003777 // Add temps if necessary for read barriers.
3778 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003779}
3780
3781void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003782 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003783 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003784 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003785 Register obj = InputRegisterAt(instruction, 0);
3786 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003787 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003788 Register out = OutputRegister(instruction);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003789 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
3790 DCHECK_LE(num_temps, 1u);
3791 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003792 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3793 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3794 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3795 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003796
Scott Wakeling97c72b72016-06-24 16:19:36 +01003797 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003798 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003799
3800 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003801 // Avoid null check if we know `obj` is not null.
3802 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003803 __ Cbz(obj, &zero);
3804 }
3805
Roland Levillain44015862016-01-22 11:47:17 +00003806 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003807 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003808 // /* HeapReference<Class> */ out = obj->klass_
3809 GenerateReferenceLoadTwoRegisters(instruction,
3810 out_loc,
3811 obj_loc,
3812 class_offset,
3813 maybe_temp_loc,
3814 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003815 __ Cmp(out, cls);
3816 __ Cset(out, eq);
3817 if (zero.IsLinked()) {
3818 __ B(&done);
3819 }
3820 break;
3821 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003822
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003823 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003824 // /* HeapReference<Class> */ out = obj->klass_
3825 GenerateReferenceLoadTwoRegisters(instruction,
3826 out_loc,
3827 obj_loc,
3828 class_offset,
3829 maybe_temp_loc,
3830 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003831 // If the class is abstract, we eagerly fetch the super class of the
3832 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003833 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003834 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003835 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003836 GenerateReferenceLoadOneRegister(instruction,
3837 out_loc,
3838 super_offset,
3839 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003840 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003841 // If `out` is null, we use it for the result, and jump to `done`.
3842 __ Cbz(out, &done);
3843 __ Cmp(out, cls);
3844 __ B(ne, &loop);
3845 __ Mov(out, 1);
3846 if (zero.IsLinked()) {
3847 __ B(&done);
3848 }
3849 break;
3850 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003851
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003852 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003853 // /* HeapReference<Class> */ out = obj->klass_
3854 GenerateReferenceLoadTwoRegisters(instruction,
3855 out_loc,
3856 obj_loc,
3857 class_offset,
3858 maybe_temp_loc,
3859 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003860 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003861 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003862 __ Bind(&loop);
3863 __ Cmp(out, cls);
3864 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003865 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003866 GenerateReferenceLoadOneRegister(instruction,
3867 out_loc,
3868 super_offset,
3869 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003870 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003871 __ Cbnz(out, &loop);
3872 // If `out` is null, we use it for the result, and jump to `done`.
3873 __ B(&done);
3874 __ Bind(&success);
3875 __ Mov(out, 1);
3876 if (zero.IsLinked()) {
3877 __ B(&done);
3878 }
3879 break;
3880 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003881
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003882 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003883 // /* HeapReference<Class> */ out = obj->klass_
3884 GenerateReferenceLoadTwoRegisters(instruction,
3885 out_loc,
3886 obj_loc,
3887 class_offset,
3888 maybe_temp_loc,
3889 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003890 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003891 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003892 __ Cmp(out, cls);
3893 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003894 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003895 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003896 GenerateReferenceLoadOneRegister(instruction,
3897 out_loc,
3898 component_offset,
3899 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003900 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003901 // If `out` is null, we use it for the result, and jump to `done`.
3902 __ Cbz(out, &done);
3903 __ Ldrh(out, HeapOperand(out, primitive_offset));
3904 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3905 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003906 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003907 __ Mov(out, 1);
3908 __ B(&done);
3909 break;
3910 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003911
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003912 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003913 // No read barrier since the slow path will retry upon failure.
3914 // /* HeapReference<Class> */ out = obj->klass_
3915 GenerateReferenceLoadTwoRegisters(instruction,
3916 out_loc,
3917 obj_loc,
3918 class_offset,
3919 maybe_temp_loc,
3920 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003921 __ Cmp(out, cls);
3922 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003923 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3924 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003925 codegen_->AddSlowPath(slow_path);
3926 __ B(ne, slow_path->GetEntryLabel());
3927 __ Mov(out, 1);
3928 if (zero.IsLinked()) {
3929 __ B(&done);
3930 }
3931 break;
3932 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003933
Calin Juravle98893e12015-10-02 21:05:03 +01003934 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003935 case TypeCheckKind::kInterfaceCheck: {
3936 // Note that we indeed only call on slow path, but we always go
3937 // into the slow path for the unresolved and interface check
3938 // cases.
3939 //
3940 // We cannot directly call the InstanceofNonTrivial runtime
3941 // entry point without resorting to a type checking slow path
3942 // here (i.e. by calling InvokeRuntime directly), as it would
3943 // require assigning fixed registers for the inputs of this
3944 // HInstanceOf instruction (following the runtime calling
3945 // convention), which might be cluttered by the potential first
3946 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003947 //
3948 // TODO: Introduce a new runtime entry point taking the object
3949 // to test (instead of its class) as argument, and let it deal
3950 // with the read barrier issues. This will let us refactor this
3951 // case of the `switch` code as it was previously (with a direct
3952 // call to the runtime not using a type checking slow path).
3953 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003954 DCHECK(locations->OnlyCallsOnSlowPath());
3955 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3956 /* is_fatal */ false);
3957 codegen_->AddSlowPath(slow_path);
3958 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003959 if (zero.IsLinked()) {
3960 __ B(&done);
3961 }
3962 break;
3963 }
3964 }
3965
3966 if (zero.IsLinked()) {
3967 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003968 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003969 }
3970
3971 if (done.IsLinked()) {
3972 __ Bind(&done);
3973 }
3974
3975 if (slow_path != nullptr) {
3976 __ Bind(slow_path->GetExitLabel());
3977 }
3978}
3979
3980void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3981 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3982 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3983
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003984 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3985 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003986 case TypeCheckKind::kExactCheck:
3987 case TypeCheckKind::kAbstractClassCheck:
3988 case TypeCheckKind::kClassHierarchyCheck:
3989 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003990 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3991 LocationSummary::kCallOnSlowPath :
3992 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003993 break;
3994 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003995 case TypeCheckKind::kUnresolvedCheck:
3996 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003997 call_kind = LocationSummary::kCallOnSlowPath;
3998 break;
3999 }
4000
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004001 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4002 locations->SetInAt(0, Location::RequiresRegister());
4003 locations->SetInAt(1, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004004 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
4005 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004006}
4007
4008void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00004009 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004010 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004011 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004012 Register obj = InputRegisterAt(instruction, 0);
4013 Register cls = InputRegisterAt(instruction, 1);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004014 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
4015 DCHECK_GE(num_temps, 1u);
4016 DCHECK_LE(num_temps, 3u);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004017 Location temp_loc = locations->GetTemp(0);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004018 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
4019 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004020 Register temp = WRegisterFrom(temp_loc);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004021 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4022 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4023 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
4024 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
4025 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
4026 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
4027 const uint32_t object_array_data_offset =
4028 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004029
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004030 bool is_type_check_slow_path_fatal = false;
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004031 // Always false when read barriers are enabled, since we may need to go to the entrypoint for
4032 // non-fatal cases caused by false negatives. The false negatives may come from skipping the
4033 // read barriers below, which is done for performance and code-size reasons.
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004034 if (!kEmitCompilerReadBarrier) {
4035 is_type_check_slow_path_fatal =
4036 (type_check_kind == TypeCheckKind::kExactCheck ||
4037 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
4038 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
4039 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
4040 !instruction->CanThrowIntoCatchBlock();
4041 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004042 SlowPathCodeARM64* type_check_slow_path =
4043 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
4044 is_type_check_slow_path_fatal);
4045 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004046
Scott Wakeling97c72b72016-06-24 16:19:36 +01004047 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004048 // Avoid null check if we know obj is not null.
4049 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01004050 __ Cbz(obj, &done);
4051 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004052
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004053 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004054 case TypeCheckKind::kExactCheck:
4055 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004056 // /* HeapReference<Class> */ temp = obj->klass_
4057 GenerateReferenceLoadTwoRegisters(instruction,
4058 temp_loc,
4059 obj_loc,
4060 class_offset,
4061 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004062 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004063
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004064 __ Cmp(temp, cls);
4065 // Jump to slow path for throwing the exception or doing a
4066 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004067 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004068 break;
4069 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004070
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004071 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004072 // /* HeapReference<Class> */ temp = obj->klass_
4073 GenerateReferenceLoadTwoRegisters(instruction,
4074 temp_loc,
4075 obj_loc,
4076 class_offset,
4077 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004078 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004079
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004080 // If the class is abstract, we eagerly fetch the super class of the
4081 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004082 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004083 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004084 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004085 GenerateReferenceLoadOneRegister(instruction,
4086 temp_loc,
4087 super_offset,
4088 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004089 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004090
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004091 // If the class reference currently in `temp` is null, jump to the slow path to throw the
4092 // exception.
4093 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
4094 // Otherwise, compare classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004095 __ Cmp(temp, cls);
4096 __ B(ne, &loop);
4097 break;
4098 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004099
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004100 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004101 // /* HeapReference<Class> */ temp = obj->klass_
4102 GenerateReferenceLoadTwoRegisters(instruction,
4103 temp_loc,
4104 obj_loc,
4105 class_offset,
4106 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004107 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004108
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004109 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004110 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004111 __ Bind(&loop);
4112 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004113 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004114
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004115 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004116 GenerateReferenceLoadOneRegister(instruction,
4117 temp_loc,
4118 super_offset,
4119 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004120 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004121
4122 // If the class reference currently in `temp` is not null, jump
4123 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004124 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004125 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004126 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004127 break;
4128 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004129
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004130 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004131 // /* HeapReference<Class> */ temp = obj->klass_
4132 GenerateReferenceLoadTwoRegisters(instruction,
4133 temp_loc,
4134 obj_loc,
4135 class_offset,
4136 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004137 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004138
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004139 // Do an exact check.
4140 __ Cmp(temp, cls);
4141 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004142
4143 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004144 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004145 GenerateReferenceLoadOneRegister(instruction,
4146 temp_loc,
4147 component_offset,
4148 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004149 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004150
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004151 // If the component type is null, jump to the slow path to throw the exception.
4152 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
4153 // Otherwise, the object is indeed an array. Further check that this component type is not a
4154 // primitive type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004155 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
4156 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004157 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004158 break;
4159 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004160
Calin Juravle98893e12015-10-02 21:05:03 +01004161 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004162 // We always go into the type check slow path for the unresolved check cases.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004163 //
4164 // We cannot directly call the CheckCast runtime entry point
4165 // without resorting to a type checking slow path here (i.e. by
 4166       // calling InvokeRuntime directly), as it would require us to
 4167       // assign fixed registers for the inputs of this HCheckCast
4168 // instruction (following the runtime calling convention), which
4169 // might be cluttered by the potential first read barrier
4170 // emission at the beginning of this method.
4171 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004172 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004173 case TypeCheckKind::kInterfaceCheck: {
4174 // /* HeapReference<Class> */ temp = obj->klass_
4175 GenerateReferenceLoadTwoRegisters(instruction,
4176 temp_loc,
4177 obj_loc,
4178 class_offset,
4179 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004180 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004181
4182 // /* HeapReference<Class> */ temp = temp->iftable_
4183 GenerateReferenceLoadTwoRegisters(instruction,
4184 temp_loc,
4185 temp_loc,
4186 iftable_offset,
4187 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004188 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08004189 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004190 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08004191 // Loop through the iftable and check if any class matches.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004192 vixl::aarch64::Label start_loop;
4193 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08004194 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004195 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
4196 GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004197 // Go to next interface.
4198 __ Add(temp, temp, 2 * kHeapReferenceSize);
4199 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08004200 // Compare the classes and continue the loop if they do not match.
4201 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
4202 __ B(ne, &start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004203 break;
4204 }
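      // Note on the interface check loop above (descriptive sketch only): the iftable is
      // an object array whose entries come in (interface class, method array) pairs, so
      // the length loaded into maybe_temp2 counts both halves of each pair. That is why
      // the cursor in `temp` advances by 2 * kHeapReferenceSize and the remaining count
      // is decremented by 2 per iteration; exhausting the count without matching `cls`
      // means the cast fails and control falls into the slow path. The pair layout comes
      // from mirror::IfTable, e.g. with two implemented interfaces:
      //   iftable_[0] = InterfaceA.class   iftable_[1] = InterfaceA method array
      //   iftable_[2] = InterfaceB.class   iftable_[3] = InterfaceB method array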
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004205 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00004206 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004207
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004208 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004209}
4210
Alexandre Rames5319def2014-10-23 10:03:10 +01004211void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
4212 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4213 locations->SetOut(Location::ConstantLocation(constant));
4214}
4215
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004216void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004217 // Will be generated at use site.
4218}
4219
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004220void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
4221 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4222 locations->SetOut(Location::ConstantLocation(constant));
4223}
4224
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004225void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004226 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004227}
4228
Calin Juravle175dc732015-08-25 15:42:32 +01004229void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4230 // The trampoline uses the same calling convention as dex calling conventions,
4231 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
4232 // the method_idx.
4233 HandleInvoke(invoke);
4234}
4235
4236void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4237 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
4238}
4239
Alexandre Rames5319def2014-10-23 10:03:10 +01004240void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01004241 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01004242 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01004243}
4244
Alexandre Rames67555f72014-11-18 10:55:16 +00004245void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4246 HandleInvoke(invoke);
4247}
4248
4249void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4250 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004251 LocationSummary* locations = invoke->GetLocations();
4252 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004253 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00004254 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004255 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00004256
4257 // The register ip1 is required to be used for the hidden argument in
4258 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01004259 MacroAssembler* masm = GetVIXLAssembler();
4260 UseScratchRegisterScope scratch_scope(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00004261 scratch_scope.Exclude(ip1);
4262 __ Mov(ip1, invoke->GetDexMethodIndex());
4263
Artem Serov914d7a82017-02-07 14:33:49 +00004264 // Ensure that between the load and MaybeRecordImplicitNullCheck there are no pools emitted.
Alexandre Rames67555f72014-11-18 10:55:16 +00004265 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07004266 __ Ldr(temp.W(), StackOperandFrom(receiver));
Artem Serov914d7a82017-02-07 14:33:49 +00004267 {
4268 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4269 // /* HeapReference<Class> */ temp = temp->klass_
4270 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
4271 codegen_->MaybeRecordImplicitNullCheck(invoke);
4272 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004273 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00004274 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004275 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07004276 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Artem Serov914d7a82017-02-07 14:33:49 +00004277 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00004278 }
Artem Serov914d7a82017-02-07 14:33:49 +00004279
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004280 // Instead of simply (possibly) unpoisoning `temp` here, we should
4281 // emit a read barrier for the previous class reference load.
 4282 // However, this is not required in practice, as this is an
4283 // intermediate/temporary reference and because the current
4284 // concurrent copying collector keeps the from-space memory
4285 // intact/accessible until the end of the marking phase (the
 4286 // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01004287 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004288 __ Ldr(temp,
4289 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
4290 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004291 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00004292 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004293 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00004294 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07004295 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004296
4297 {
4298 // Ensure the pc position is recorded immediately after the `blr` instruction.
4299 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4300
4301 // lr();
4302 __ blr(lr);
4303 DCHECK(!codegen_->IsLeafMethod());
4304 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4305 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004306}
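// Illustrative shape of the interface dispatch emitted above (a sketch; register
// names and offsets are symbolic, not the literal encoding):
//   mov  ip1, #dex_method_index          // hidden argument for art_quick_imt_conflict_trampoline
//   ldr  wTemp, [receiver, #klass_]      // implicit null check recorded here
//   ldr  xTemp, [xTemp, #imt_ptr_]       // ImTable of the receiver's class
//   ldr  xTemp, [xTemp, #imt_slot]       // ArtMethod* for the IMT index
//   ldr  lr,    [xTemp, #entry_point_from_quick_compiled_code_]
//   blr  lr                              // pc recorded immediately after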
4307
4308void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Nicolas Geoffray331605a2017-03-01 11:01:41 +00004309 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004310 if (intrinsic.TryDispatch(invoke)) {
4311 return;
4312 }
4313
Alexandre Rames67555f72014-11-18 10:55:16 +00004314 HandleInvoke(invoke);
4315}
4316
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00004317void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004318 // Explicit clinit checks triggered by static invokes must have been pruned by
4319 // art::PrepareForRegisterAllocation.
4320 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004321
Nicolas Geoffray331605a2017-03-01 11:01:41 +00004322 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004323 if (intrinsic.TryDispatch(invoke)) {
4324 return;
4325 }
4326
Alexandre Rames67555f72014-11-18 10:55:16 +00004327 HandleInvoke(invoke);
4328}
4329
Andreas Gampe878d58c2015-01-15 23:24:00 -08004330static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
4331 if (invoke->GetLocations()->Intrinsified()) {
4332 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
4333 intrinsic.Dispatch(invoke);
4334 return true;
4335 }
4336 return false;
4337}
4338
Vladimir Markodc151b22015-10-15 18:02:30 +01004339HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
4340 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004341 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00004342 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01004343 return desired_dispatch_info;
4344}
4345
TatWai Chongd8c052a2016-11-02 16:12:48 +08004346Location CodeGeneratorARM64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
4347 Location temp) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004348 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00004349 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4350 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004351 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4352 uint32_t offset =
4353 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00004354 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004355 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004356 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004357 }
Vladimir Marko58155012015-08-19 12:49:41 +00004358 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004359 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004360 break;
4361 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
4362 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00004363 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00004364 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004365 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
4366 // Add ADRP with its PC-relative DexCache access patch.
Nicolas Geoffray5d37c152017-01-12 13:25:19 +00004367 const DexFile& dex_file = invoke->GetDexFileForPcRelativeDexCache();
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004368 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004369 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004370 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004371 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004372 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004373 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004374 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004375 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004376 }
Vladimir Marko58155012015-08-19 12:49:41 +00004377 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00004378 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004379 Register reg = XRegisterFrom(temp);
4380 Register method_reg;
4381 if (current_method.IsRegister()) {
4382 method_reg = XRegisterFrom(current_method);
4383 } else {
4384 DCHECK(invoke->GetLocations()->Intrinsified());
4385 DCHECK(!current_method.IsValid());
4386 method_reg = reg;
4387 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
4388 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00004389
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004390 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01004391 __ Ldr(reg.X(),
4392 MemOperand(method_reg.X(),
Andreas Gampe542451c2016-07-26 09:02:02 -07004393 ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00004394 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01004395 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
4396 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00004397 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
4398 break;
4399 }
4400 }
TatWai Chongd8c052a2016-11-02 16:12:48 +08004401 return callee_method;
4402}
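// For the kDexCachePcRelative kind above, the ADRP/LDR pair is emitted with
// placeholder immediates and recorded as PC-relative patches; once the linker has
// filled them in, the pair conceptually reads:
//   adrp xTemp, <page of the dex cache array element>
//   ldr  xTemp, [xTemp, #<offset of the element within the page>]  // ArtMethod*
// (Sketch of the patched result only; the actual immediates are resolved at link
// time via EmitLinkerPatches.)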
4403
4404void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
4405 // All registers are assumed to be correctly set up.
4406 Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
Vladimir Marko58155012015-08-19 12:49:41 +00004407
4408 switch (invoke->GetCodePtrLocation()) {
4409 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
4410 __ Bl(&frame_entry_label_);
4411 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004412 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4413 // LR = callee_method->entry_point_from_quick_compiled_code_;
4414 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00004415 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07004416 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004417 {
 4418         // To ensure that the pc position is recorded immediately after the `blr` instruction,
4419 // BLR must be the last instruction emitted in this function.
4420 // Recording the pc will occur right after returning from this function.
4421 ExactAssemblyScope eas(GetVIXLAssembler(),
4422 kInstructionSize,
4423 CodeBufferCheckScope::kExactSize);
4424 // lr()
4425 __ blr(lr);
4426 }
Vladimir Marko58155012015-08-19 12:49:41 +00004427 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00004428 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004429
Andreas Gampe878d58c2015-01-15 23:24:00 -08004430 DCHECK(!IsLeafMethod());
4431}
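// Sketch of the kCallArtMethod path above (symbolic offsets):
//   ldr lr, [callee_method, #entry_point_from_quick_compiled_code_]
//   blr lr
// The ExactAssemblyScope keeps the raw `blr` as the last instruction emitted by
// this function, so the pc recorded by the caller right after returning points
// just past the call.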
4432
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004433void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004434 // Use the calling convention instead of the location of the receiver, as
4435 // intrinsics may have put the receiver in a different register. In the intrinsics
4436 // slow path, the arguments have been moved to the right place, so here we are
4437 // guaranteed that the receiver is the first register of the calling convention.
4438 InvokeDexCallingConvention calling_convention;
4439 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004440 Register temp = XRegisterFrom(temp_in);
4441 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4442 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
4443 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004444 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004445
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004446 DCHECK(receiver.IsRegister());
Artem Serov914d7a82017-02-07 14:33:49 +00004447
4448 {
 4449     // Ensure that between the load and MaybeRecordImplicitNullCheck there are no pools emitted.
4450 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4451 // /* HeapReference<Class> */ temp = receiver->klass_
4452 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
4453 MaybeRecordImplicitNullCheck(invoke);
4454 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004455 // Instead of simply (possibly) unpoisoning `temp` here, we should
4456 // emit a read barrier for the previous class reference load.
 // However, this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004457 // intermediate/temporary reference and because the current
4458 // concurrent copying collector keeps the from-space memory
4459 // intact/accessible until the end of the marking phase (the
 4460 // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004461 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4462 // temp = temp->GetMethodAt(method_offset);
4463 __ Ldr(temp, MemOperand(temp, method_offset));
4464 // lr = temp->GetEntryPoint();
4465 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
Artem Serov914d7a82017-02-07 14:33:49 +00004466 {
4467 // To ensure that the pc position is recorded immediately after the `blr` instruction
4468 // BLR should be the last instruction emitted in this function.
4469 // Recording the pc will occur right after returning from this function.
4470 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4471 // lr();
4472 __ blr(lr);
4473 }
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004474}
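// Sketch of the virtual dispatch emitted above (symbolic register names):
//   ldr  wTemp, [receiver, #klass_]              // implicit null check recorded here
//   ldr  xTemp, [xTemp, #embedded_vtable_entry]  // ArtMethod* at the vtable index
//   ldr  lr,    [xTemp, #entry_point_from_quick_compiled_code_]
//   blr  lr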
4475
Orion Hodsonac141392017-01-13 11:53:47 +00004476void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4477 HandleInvoke(invoke);
4478}
4479
4480void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4481 codegen_->GenerateInvokePolymorphicCall(invoke);
4482}
4483
Scott Wakeling97c72b72016-06-24 16:19:36 +01004484vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
4485 const DexFile& dex_file,
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004486 dex::StringIndex string_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004487 vixl::aarch64::Label* adrp_label) {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004488 return
4489 NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &pc_relative_string_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004490}
4491
Scott Wakeling97c72b72016-06-24 16:19:36 +01004492vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
4493 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004494 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004495 vixl::aarch64::Label* adrp_label) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08004496 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &pc_relative_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004497}
4498
Vladimir Marko1998cd02017-01-13 13:02:58 +00004499vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4500 const DexFile& dex_file,
4501 dex::TypeIndex type_index,
4502 vixl::aarch64::Label* adrp_label) {
4503 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
4504}
4505
Scott Wakeling97c72b72016-06-24 16:19:36 +01004506vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
4507 const DexFile& dex_file,
4508 uint32_t element_offset,
4509 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004510 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
4511}
4512
Scott Wakeling97c72b72016-06-24 16:19:36 +01004513vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
4514 const DexFile& dex_file,
4515 uint32_t offset_or_index,
4516 vixl::aarch64::Label* adrp_label,
4517 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004518 // Add a patch entry and return the label.
4519 patches->emplace_back(dex_file, offset_or_index);
4520 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004521 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004522 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4523 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4524 return label;
4525}
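// Each PcRelativePatchInfo couples the label bound at the instruction to patch with
// the label of the ADRP it depends on; for the ADRP patch itself the two labels
// coincide (pc_insn_label points at the patch's own label). These entries are later
// turned into LinkerPatch records, keyed by the labels' code offsets, in
// EmitLinkerPatches.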
4526
Scott Wakeling97c72b72016-06-24 16:19:36 +01004527vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004528 const DexFile& dex_file, dex::StringIndex string_index) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004529 return boot_image_string_patches_.GetOrCreate(
4530 StringReference(&dex_file, string_index),
4531 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4532}
4533
Scott Wakeling97c72b72016-06-24 16:19:36 +01004534vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Andreas Gampea5b09a62016-11-17 15:21:22 -08004535 const DexFile& dex_file, dex::TypeIndex type_index) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004536 return boot_image_type_patches_.GetOrCreate(
4537 TypeReference(&dex_file, type_index),
4538 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4539}
4540
Scott Wakeling97c72b72016-06-24 16:19:36 +01004541vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4542 uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00004543 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004544}
4545
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004546vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004547 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
4548 jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
4549 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004550 return jit_string_patches_.GetOrCreate(
4551 StringReference(&dex_file, string_index),
4552 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4553}
4554
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004555vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004556 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
4557 jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
4558 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004559 return jit_class_patches_.GetOrCreate(
4560 TypeReference(&dex_file, type_index),
4561 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4562}
4563
Vladimir Markoaad75c62016-10-03 08:46:48 +00004564void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4565 vixl::aarch64::Register reg) {
4566 DCHECK(reg.IsX());
4567 SingleEmissionCheckScope guard(GetVIXLAssembler());
4568 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004569 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004570}
4571
4572void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4573 vixl::aarch64::Register out,
4574 vixl::aarch64::Register base) {
4575 DCHECK(out.IsX());
4576 DCHECK(base.IsX());
4577 SingleEmissionCheckScope guard(GetVIXLAssembler());
4578 __ Bind(fixup_label);
4579 __ add(out, base, Operand(/* offset placeholder */ 0));
4580}
4581
4582void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4583 vixl::aarch64::Register out,
4584 vixl::aarch64::Register base) {
4585 DCHECK(base.IsX());
4586 SingleEmissionCheckScope guard(GetVIXLAssembler());
4587 __ Bind(fixup_label);
4588 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4589}
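// The three placeholder emitters above produce the instructions that the linker
// later rewrites. A PC-relative address is materialized as one of (sketch):
//   adrp xN, <page>                      // EmitAdrpPlaceholder
//   add  xN, xN, #<page offset>          // EmitAddPlaceholder: address of the target
// or
//   adrp xN, <page>
//   ldr  rN, [xN, #<page offset>]        // EmitLdrOffsetPlaceholder: load through the target
// The SingleEmissionCheckScope ensures the label is bound to exactly the patched
// instruction, with no pool or veneer emitted in between.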
4590
4591template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
4592inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4593 const ArenaDeque<PcRelativePatchInfo>& infos,
4594 ArenaVector<LinkerPatch>* linker_patches) {
4595 for (const PcRelativePatchInfo& info : infos) {
4596 linker_patches->push_back(Factory(info.label.GetLocation(),
4597 &info.target_dex_file,
4598 info.pc_insn_label->GetLocation(),
4599 info.offset_or_index));
4600 }
4601}
4602
Vladimir Marko58155012015-08-19 12:49:41 +00004603void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
4604 DCHECK(linker_patches->empty());
4605 size_t size =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004606 pc_relative_dex_cache_patches_.size() +
4607 boot_image_string_patches_.size() +
4608 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004609 boot_image_type_patches_.size() +
4610 pc_relative_type_patches_.size() +
Richard Uhlerc52f3032017-03-02 13:45:45 +00004611 type_bss_entry_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004612 linker_patches->reserve(size);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004613 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004614 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00004615 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004616 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004617 info.offset_or_index));
4618 }
4619 for (const auto& entry : boot_image_string_patches_) {
4620 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004621 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4622 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004623 target_string.dex_file,
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004624 target_string.string_index.index_));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004625 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004626 if (!GetCompilerOptions().IsBootImage()) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00004627 DCHECK(pc_relative_type_patches_.empty());
Vladimir Markoaad75c62016-10-03 08:46:48 +00004628 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
4629 linker_patches);
4630 } else {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004631 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
4632 linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004633 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
4634 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004635 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004636 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
4637 linker_patches);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004638 for (const auto& entry : boot_image_type_patches_) {
4639 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004640 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4641 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004642 target_type.dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004643 target_type.type_index.index_));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004644 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004645 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004646}
4647
Scott Wakeling97c72b72016-06-24 16:19:36 +01004648vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004649 Uint32ToLiteralMap* map) {
4650 return map->GetOrCreate(
4651 value,
4652 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4653}
4654
Scott Wakeling97c72b72016-06-24 16:19:36 +01004655vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004656 return uint64_literals_.GetOrCreate(
4657 value,
4658 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004659}
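// Literal deduplication: GetOrCreate returns the pool entry already created for a
// given value, or runs the lambda to create one, so repeated requests share a single
// literal pool slot. Typical use, as in the kDirectAddress case above:
//   __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));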
4660
Scott Wakeling97c72b72016-06-24 16:19:36 +01004661vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00004662 MethodReference target_method,
4663 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004664 return map->GetOrCreate(
4665 target_method,
4666 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00004667}
4668
Andreas Gampe878d58c2015-01-15 23:24:00 -08004669void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004670 // Explicit clinit checks triggered by static invokes must have been pruned by
4671 // art::PrepareForRegisterAllocation.
4672 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004673
Andreas Gampe878d58c2015-01-15 23:24:00 -08004674 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4675 return;
4676 }
4677
Artem Serov914d7a82017-02-07 14:33:49 +00004678 // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
4679 // are no pools emitted.
4680 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004681 LocationSummary* locations = invoke->GetLocations();
4682 codegen_->GenerateStaticOrDirectCall(
4683 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00004684 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01004685}
4686
4687void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004688 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4689 return;
4690 }
4691
Artem Serov914d7a82017-02-07 14:33:49 +00004692 // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
4693 // are no pools emitted.
4694 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004695 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004696 DCHECK(!codegen_->IsLeafMethod());
4697 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4698}
4699
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004700HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4701 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004702 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004703 case HLoadClass::LoadKind::kInvalid:
4704 LOG(FATAL) << "UNREACHABLE";
4705 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004706 case HLoadClass::LoadKind::kReferrersClass:
4707 break;
4708 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4709 DCHECK(!GetCompilerOptions().GetCompilePic());
4710 break;
4711 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
4712 DCHECK(GetCompilerOptions().GetCompilePic());
4713 break;
4714 case HLoadClass::LoadKind::kBootImageAddress:
4715 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004716 case HLoadClass::LoadKind::kBssEntry:
4717 DCHECK(!Runtime::Current()->UseJitCompilation());
4718 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004719 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004720 DCHECK(Runtime::Current()->UseJitCompilation());
4721 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004722 case HLoadClass::LoadKind::kDexCacheViaMethod:
4723 break;
4724 }
4725 return desired_class_load_kind;
4726}
4727
Alexandre Rames67555f72014-11-18 10:55:16 +00004728void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004729 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4730 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004731 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00004732 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004733 cls,
4734 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00004735 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00004736 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004737 return;
4738 }
Vladimir Marko41559982017-01-06 14:04:23 +00004739 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004740
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004741 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4742 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004743 ? LocationSummary::kCallOnSlowPath
4744 : LocationSummary::kNoCall;
4745 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004746 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004747 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004748 }
4749
Vladimir Marko41559982017-01-06 14:04:23 +00004750 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004751 locations->SetInAt(0, Location::RequiresRegister());
4752 }
4753 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004754 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
4755 if (!kUseReadBarrier || kUseBakerReadBarrier) {
4756 // Rely on the type resolution or initialization and marking to save everything we need.
4757 // Note that IP0 may be clobbered by saving/restoring the live register (only one thanks
4758 // to the custom calling convention) or by marking, so we shall use IP1.
4759 RegisterSet caller_saves = RegisterSet::Empty();
4760 InvokeRuntimeCallingConvention calling_convention;
4761 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
4762 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
4763 RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
4764 Primitive::kPrimNot).GetCode());
4765 locations->SetCustomSlowPathCallerSaves(caller_saves);
4766 } else {
4767 // For non-Baker read barrier we have a temp-clobbering call.
4768 }
4769 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004770}
4771
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004772// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4773// move.
4774void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00004775 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4776 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
4777 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01004778 return;
4779 }
Vladimir Marko41559982017-01-06 14:04:23 +00004780 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01004781
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004782 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004783 Register out = OutputRegister(cls);
Vladimir Markoea4c1262017-02-06 19:59:33 +00004784 Register bss_entry_temp;
4785 vixl::aarch64::Label* bss_entry_adrp_label = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00004786
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004787 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
4788 ? kWithoutReadBarrier
4789 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004790 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00004791 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004792 case HLoadClass::LoadKind::kReferrersClass: {
4793 DCHECK(!cls->CanCallRuntime());
4794 DCHECK(!cls->MustGenerateClinitCheck());
4795 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4796 Register current_method = InputRegisterAt(cls, 0);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004797 GenerateGcRootFieldLoad(cls,
4798 out_loc,
4799 current_method,
4800 ArtMethod::DeclaringClassOffset().Int32Value(),
Roland Levillain00468f32016-10-27 18:02:48 +01004801 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004802 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004803 break;
4804 }
4805 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004806 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004807 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4808 cls->GetTypeIndex()));
4809 break;
4810 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004811 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004812 // Add ADRP with its PC-relative type patch.
4813 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08004814 dex::TypeIndex type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004815 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004816 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004817 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004818 vixl::aarch64::Label* add_label =
4819 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004820 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004821 break;
4822 }
4823 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004824 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004825 uint32_t address = dchecked_integral_cast<uint32_t>(
4826 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
4827 DCHECK_NE(address, 0u);
4828 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004829 break;
4830 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004831 case HLoadClass::LoadKind::kBssEntry: {
4832 // Add ADRP with its PC-relative Class .bss entry patch.
4833 const DexFile& dex_file = cls->GetDexFile();
4834 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Markoea4c1262017-02-06 19:59:33 +00004835 // We can go to the slow path even with a non-zero reference, and in that case marking
 4836 // can clobber IP0, so we need to use IP1, which shall be preserved.
4837 bss_entry_temp = ip1;
4838 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4839 temps.Exclude(bss_entry_temp);
4840 bss_entry_adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
4841 codegen_->EmitAdrpPlaceholder(bss_entry_adrp_label, bss_entry_temp);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004842 // Add LDR with its PC-relative Class patch.
4843 vixl::aarch64::Label* ldr_label =
Vladimir Markoea4c1262017-02-06 19:59:33 +00004844 codegen_->NewBssEntryTypePatch(dex_file, type_index, bss_entry_adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004845 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4846 GenerateGcRootFieldLoad(cls,
Vladimir Markoea4c1262017-02-06 19:59:33 +00004847 out_loc,
4848 bss_entry_temp,
4849 /* offset placeholder */ 0u,
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004850 ldr_label,
Vladimir Markoea4c1262017-02-06 19:59:33 +00004851 read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004852 generate_null_check = true;
4853 break;
4854 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004855 case HLoadClass::LoadKind::kJitTableAddress: {
4856 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
4857 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004858 cls->GetClass()));
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004859 GenerateGcRootFieldLoad(cls,
4860 out_loc,
4861 out.X(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004862 /* offset */ 0,
Roland Levillain00468f32016-10-27 18:02:48 +01004863 /* fixup_label */ nullptr,
Vladimir Markoea4c1262017-02-06 19:59:33 +00004864 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004865 break;
4866 }
Vladimir Marko41559982017-01-06 14:04:23 +00004867 case HLoadClass::LoadKind::kDexCacheViaMethod:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004868 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00004869 LOG(FATAL) << "UNREACHABLE";
4870 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004871 }
4872
Vladimir Markoea4c1262017-02-06 19:59:33 +00004873 bool do_clinit = cls->MustGenerateClinitCheck();
4874 if (generate_null_check || do_clinit) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004875 DCHECK(cls->CanCallRuntime());
4876 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
Vladimir Markoea4c1262017-02-06 19:59:33 +00004877 cls, cls, cls->GetDexPc(), do_clinit, bss_entry_temp, bss_entry_adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004878 codegen_->AddSlowPath(slow_path);
4879 if (generate_null_check) {
4880 __ Cbz(out, slow_path->GetEntryLabel());
4881 }
4882 if (cls->MustGenerateClinitCheck()) {
4883 GenerateClassInitializationCheck(slow_path, out);
4884 } else {
4885 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004886 }
4887 }
4888}
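// For the kBssEntry kind above, the fast path is conceptually:
//   adrp ip1, <page of the Class .bss slot>
//   ldr  wOut, [ip1, #<page offset>]     // GC root load, read barrier dependent
//   cbz  wOut, <LoadClassSlowPathARM64>  // unresolved -> resolve at runtime
// The slow path resolves (and, if requested, initializes) the class and is expected
// to fill the .bss slot so later executions stay on the fast path. (Descriptive
// sketch only; the exact sequence depends on the read barrier configuration.)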
4889
David Brazdilcb1c0552015-08-04 16:22:25 +01004890static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07004891 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004892}
4893
Alexandre Rames67555f72014-11-18 10:55:16 +00004894void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4895 LocationSummary* locations =
4896 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4897 locations->SetOut(Location::RequiresRegister());
4898}
4899
4900void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004901 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4902}
4903
4904void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4905 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4906}
4907
4908void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4909 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004910}
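// The pending exception is a fixed slot of the Thread object addressed off the
// reserved thread register `tr`, so the two visitors above reduce to (sketch):
//   ldr wOut, [tr, #Thread::ExceptionOffset]   // HLoadException
//   str wzr,  [tr, #Thread::ExceptionOffset]   // HClearException clears the slot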
4911
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004912HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4913 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004914 switch (desired_string_load_kind) {
4915 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4916 DCHECK(!GetCompilerOptions().GetCompilePic());
4917 break;
4918 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4919 DCHECK(GetCompilerOptions().GetCompilePic());
4920 break;
4921 case HLoadString::LoadKind::kBootImageAddress:
4922 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004923 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01004924 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004925 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004926 case HLoadString::LoadKind::kJitTableAddress:
4927 DCHECK(Runtime::Current()->UseJitCompilation());
4928 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004929 case HLoadString::LoadKind::kDexCacheViaMethod:
4930 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004931 }
4932 return desired_string_load_kind;
4933}
4934
Alexandre Rames67555f72014-11-18 10:55:16 +00004935void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004936 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004937 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004938 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004939 InvokeRuntimeCallingConvention calling_convention;
4940 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
4941 } else {
4942 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004943 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
4944 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00004945 // Rely on the pResolveString entrypoint and marking to save everything we need.
4946 // Note that IP0 may be clobbered by saving/restoring the live register (only one thanks
4947 // to the custom calling convention) or by marking, so we shall use IP1.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004948 RegisterSet caller_saves = RegisterSet::Empty();
4949 InvokeRuntimeCallingConvention calling_convention;
4950 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
4951 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
4952 RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
4953 Primitive::kPrimNot).GetCode());
4954 locations->SetCustomSlowPathCallerSaves(caller_saves);
4955 } else {
4956 // For non-Baker read barrier we have a temp-clobbering call.
4957 }
4958 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004959 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004960}
4961
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004962// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4963// move.
4964void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00004965 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004966 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004967
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004968 switch (load->GetLoadKind()) {
4969 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004970 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4971 load->GetStringIndex()));
4972 return; // No dex cache slow path.
4973 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004974 // Add ADRP with its PC-relative String patch.
4975 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004976 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004977 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Scott Wakeling97c72b72016-06-24 16:19:36 +01004978 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004979 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004980 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004981 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004982 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004983 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004984 return; // No dex cache slow path.
4985 }
4986 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004987 uint32_t address = dchecked_integral_cast<uint32_t>(
4988 reinterpret_cast<uintptr_t>(load->GetString().Get()));
4989 DCHECK_NE(address, 0u);
4990 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004991 return; // No dex cache slow path.
4992 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004993 case HLoadString::LoadKind::kBssEntry: {
4994 // Add ADRP with its PC-relative String .bss entry patch.
4995 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004996 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004997 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004998 // We could use IP0, as marking shall not clobber IP0 if the reference is null, and
 4999 // that is the only case in which we need the slow path. But let's not rely on such details and use IP1.
5000 Register temp = ip1;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005001 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
Vladimir Markoea4c1262017-02-06 19:59:33 +00005002 temps.Exclude(temp);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005003 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005004 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005005 // Add LDR with its PC-relative String patch.
5006 vixl::aarch64::Label* ldr_label =
5007 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005008 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoaad75c62016-10-03 08:46:48 +00005009 GenerateGcRootFieldLoad(load,
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005010 out_loc,
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005011 temp,
Roland Levillain00468f32016-10-27 18:02:48 +01005012 /* offset placeholder */ 0u,
5013 ldr_label,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005014 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005015 SlowPathCodeARM64* slow_path =
5016 new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load, temp, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005017 codegen_->AddSlowPath(slow_path);
5018 __ Cbz(out.X(), slow_path->GetEntryLabel());
5019 __ Bind(slow_path->GetExitLabel());
5020 return;
5021 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005022 case HLoadString::LoadKind::kJitTableAddress: {
5023 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005024 load->GetStringIndex(),
5025 load->GetString()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005026 GenerateGcRootFieldLoad(load,
5027 out_loc,
5028 out.X(),
5029 /* offset */ 0,
5030 /* fixup_label */ nullptr,
5031 kCompilerReadBarrierOption);
5032 return;
5033 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005034 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005035 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005036 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005037
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005038 // TODO: Re-add the compiler code to do the string dex cache lookup.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005039 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005040 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005041 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005042 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
5043 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexandre Rames67555f72014-11-18 10:55:16 +00005044}
5045
Alexandre Rames5319def2014-10-23 10:03:10 +01005046void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
5047 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
5048 locations->SetOut(Location::ConstantLocation(constant));
5049}
5050
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005051void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005052 // Will be generated at use site.
5053}
5054
Alexandre Rames67555f72014-11-18 10:55:16 +00005055void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
5056 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005057 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005058 InvokeRuntimeCallingConvention calling_convention;
5059 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5060}
5061
5062void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01005063 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005064 instruction,
5065 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005066 if (instruction->IsEnter()) {
5067 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5068 } else {
5069 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5070 }
Alexandre Rames67555f72014-11-18 10:55:16 +00005071}
5072
Alexandre Rames42d641b2014-10-27 14:00:51 +00005073void LocationsBuilderARM64::VisitMul(HMul* mul) {
5074 LocationSummary* locations =
5075 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
5076 switch (mul->GetResultType()) {
5077 case Primitive::kPrimInt:
5078 case Primitive::kPrimLong:
5079 locations->SetInAt(0, Location::RequiresRegister());
5080 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005081 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005082 break;
5083
5084 case Primitive::kPrimFloat:
5085 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005086 locations->SetInAt(0, Location::RequiresFpuRegister());
5087 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00005088 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005089 break;
5090
5091 default:
5092 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5093 }
5094}
5095
5096void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
5097 switch (mul->GetResultType()) {
5098 case Primitive::kPrimInt:
5099 case Primitive::kPrimLong:
5100 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
5101 break;
5102
5103 case Primitive::kPrimFloat:
5104 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005105 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00005106 break;
5107
5108 default:
5109 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5110 }
5111}
5112
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005113void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
5114 LocationSummary* locations =
5115 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
5116 switch (neg->GetResultType()) {
5117 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00005118 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00005119 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00005120 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005121 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005122
5123 case Primitive::kPrimFloat:
5124 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00005125 locations->SetInAt(0, Location::RequiresFpuRegister());
5126 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005127 break;
5128
5129 default:
5130 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5131 }
5132}
5133
5134void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
5135 switch (neg->GetResultType()) {
5136 case Primitive::kPrimInt:
5137 case Primitive::kPrimLong:
5138 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
5139 break;
5140
5141 case Primitive::kPrimFloat:
5142 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00005143 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005144 break;
5145
5146 default:
5147 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5148 }
5149}
5150
5151void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
5152 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005153 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005154 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005155 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005156 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5157 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005158}
5159
5160void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01005161 // Note: if heap poisoning is enabled, the entry point takes care
5162 // of poisoning the reference.
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00005163 QuickEntrypointEnum entrypoint =
5164 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
5165 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005166 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005167}
5168
Alexandre Rames5319def2014-10-23 10:03:10 +01005169void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
5170 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005171 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01005172 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00005173 if (instruction->IsStringAlloc()) {
5174 locations->AddTemp(LocationFrom(kArtMethodRegister));
5175 } else {
5176 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00005177 }
Alexandre Rames5319def2014-10-23 10:03:10 +01005178 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
5179}
5180
5181void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01005182 // Note: if heap poisoning is enabled, the entry point takes care
5183 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00005184 if (instruction->IsStringAlloc()) {
5185 // String is allocated through StringFactory. Call NewEmptyString entry point.
5186 Location temp = instruction->GetLocations()->GetTemp(0);
Andreas Gampe542451c2016-07-26 09:02:02 -07005187 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00005188 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
5189 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00005190
5191 {
5192 // Ensure the pc position is recorded immediately after the `blr` instruction.
5193 ExactAssemblyScope eas(GetVIXLAssembler(),
5194 kInstructionSize,
5195 CodeBufferCheckScope::kExactSize);
5196 __ blr(lr);
5197 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
5198 }
David Brazdil6de19382016-01-08 17:37:10 +00005199 } else {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005200 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00005201 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00005202 }
Alexandre Rames5319def2014-10-23 10:03:10 +01005203}
5204
5205void LocationsBuilderARM64::VisitNot(HNot* instruction) {
5206 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00005207 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005208 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01005209}
5210
5211void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00005212 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005213 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01005214 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01005215 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01005216 break;
5217
5218 default:
5219 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5220 }
5221}
5222
David Brazdil66d126e2015-04-03 16:02:44 +01005223void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
5224 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5225 locations->SetInAt(0, Location::RequiresRegister());
5226 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5227}
5228
5229void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005230 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01005231}
5232
Alexandre Rames5319def2014-10-23 10:03:10 +01005233void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005234 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5235 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01005236}
5237
Calin Juravle2ae48182016-03-16 14:05:09 +00005238void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5239 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005240 return;
5241 }
Artem Serov914d7a82017-02-07 14:33:49 +00005242 {
5243 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
5244 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5245 Location obj = instruction->GetLocations()->InAt(0);
5246 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
5247 RecordPcInfo(instruction, instruction->GetDexPc());
5248 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005249}
5250
Calin Juravle2ae48182016-03-16 14:05:09 +00005251void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005252 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005253 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01005254
5255 LocationSummary* locations = instruction->GetLocations();
5256 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005257
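  // Branch to the throwing slow path if the object register holds null.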
5258 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005259}
5260
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005261void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005262 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005263}
5264
Alexandre Rames67555f72014-11-18 10:55:16 +00005265void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5266 HandleBinaryOp(instruction);
5267}
5268
5269void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5270 HandleBinaryOp(instruction);
5271}
5272
Alexandre Rames3e69f162014-12-10 10:36:50 +00005273void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5274 LOG(FATAL) << "Unreachable";
5275}
5276
5277void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
5278 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5279}
5280
Alexandre Rames5319def2014-10-23 10:03:10 +01005281void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
5282 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5283 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5284 if (location.IsStackSlot()) {
5285 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5286 } else if (location.IsDoubleStackSlot()) {
5287 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5288 }
5289 locations->SetOut(location);
5290}
5291
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005292void InstructionCodeGeneratorARM64::VisitParameterValue(
5293 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005294 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005295}
5296
5297void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5298 LocationSummary* locations =
5299 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005300 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005301}
5302
5303void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5304 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5305 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005306}
5307
5308void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
5309 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005310 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005311 locations->SetInAt(i, Location::Any());
5312 }
5313 locations->SetOut(Location::Any());
5314}
5315
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005316void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005317 LOG(FATAL) << "Unreachable";
5318}
5319
Serban Constantinescu02164b32014-11-13 14:05:07 +00005320void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005321 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005322 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005323 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
5324 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005325 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
5326
5327 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005328 case Primitive::kPrimInt:
5329 case Primitive::kPrimLong:
5330 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005331 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005332 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5333 break;
5334
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005335 case Primitive::kPrimFloat:
5336 case Primitive::kPrimDouble: {
5337 InvokeRuntimeCallingConvention calling_convention;
5338 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5339 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5340 locations->SetOut(calling_convention.GetReturnLocation(type));
5341
5342 break;
5343 }
5344
Serban Constantinescu02164b32014-11-13 14:05:07 +00005345 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005346 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005347 }
5348}
5349
5350void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
5351 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005352
Serban Constantinescu02164b32014-11-13 14:05:07 +00005353 switch (type) {
5354 case Primitive::kPrimInt:
5355 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08005356 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005357 break;
5358 }
5359
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005360 case Primitive::kPrimFloat:
5361 case Primitive::kPrimDouble: {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005362 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
5363 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005364 if (type == Primitive::kPrimFloat) {
5365 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5366 } else {
5367 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5368 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005369 break;
5370 }
5371
Serban Constantinescu02164b32014-11-13 14:05:07 +00005372 default:
5373 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005374 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005375 }
5376}
5377
Calin Juravle27df7582015-04-17 19:12:31 +01005378void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5379 memory_barrier->SetLocations(nullptr);
5380}
5381
5382void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005383 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005384}
5385
Alexandre Rames5319def2014-10-23 10:03:10 +01005386void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
5387 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5388 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005389 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005390}
5391
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005392void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005393 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005394}
5395
5396void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5397 instruction->SetLocations(nullptr);
5398}
5399
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005400void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005401 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005402}
5403
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005404void LocationsBuilderARM64::VisitRor(HRor* ror) {
5405 HandleBinaryOp(ror);
5406}
5407
5408void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5409 HandleBinaryOp(ror);
5410}
5411
Serban Constantinescu02164b32014-11-13 14:05:07 +00005412void LocationsBuilderARM64::VisitShl(HShl* shl) {
5413 HandleShift(shl);
5414}
5415
5416void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5417 HandleShift(shl);
5418}
5419
5420void LocationsBuilderARM64::VisitShr(HShr* shr) {
5421 HandleShift(shr);
5422}
5423
5424void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5425 HandleShift(shr);
5426}
5427
Alexandre Rames5319def2014-10-23 10:03:10 +01005428void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005429 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005430}
5431
5432void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005433 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005434}
5435
Alexandre Rames67555f72014-11-18 10:55:16 +00005436void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005437 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00005438}
5439
5440void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005441 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005442}
5443
5444void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005445 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005446}
5447
Alexandre Rames67555f72014-11-18 10:55:16 +00005448void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005449 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005450}
5451
Calin Juravlee460d1d2015-09-29 04:52:17 +01005452void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
5453 HUnresolvedInstanceFieldGet* instruction) {
5454 FieldAccessCallingConventionARM64 calling_convention;
5455 codegen_->CreateUnresolvedFieldLocationSummary(
5456 instruction, instruction->GetFieldType(), calling_convention);
5457}
5458
5459void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
5460 HUnresolvedInstanceFieldGet* instruction) {
5461 FieldAccessCallingConventionARM64 calling_convention;
5462 codegen_->GenerateUnresolvedFieldAccess(instruction,
5463 instruction->GetFieldType(),
5464 instruction->GetFieldIndex(),
5465 instruction->GetDexPc(),
5466 calling_convention);
5467}
5468
5469void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
5470 HUnresolvedInstanceFieldSet* instruction) {
5471 FieldAccessCallingConventionARM64 calling_convention;
5472 codegen_->CreateUnresolvedFieldLocationSummary(
5473 instruction, instruction->GetFieldType(), calling_convention);
5474}
5475
5476void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
5477 HUnresolvedInstanceFieldSet* instruction) {
5478 FieldAccessCallingConventionARM64 calling_convention;
5479 codegen_->GenerateUnresolvedFieldAccess(instruction,
5480 instruction->GetFieldType(),
5481 instruction->GetFieldIndex(),
5482 instruction->GetDexPc(),
5483 calling_convention);
5484}
5485
5486void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
5487 HUnresolvedStaticFieldGet* instruction) {
5488 FieldAccessCallingConventionARM64 calling_convention;
5489 codegen_->CreateUnresolvedFieldLocationSummary(
5490 instruction, instruction->GetFieldType(), calling_convention);
5491}
5492
5493void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
5494 HUnresolvedStaticFieldGet* instruction) {
5495 FieldAccessCallingConventionARM64 calling_convention;
5496 codegen_->GenerateUnresolvedFieldAccess(instruction,
5497 instruction->GetFieldType(),
5498 instruction->GetFieldIndex(),
5499 instruction->GetDexPc(),
5500 calling_convention);
5501}
5502
5503void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
5504 HUnresolvedStaticFieldSet* instruction) {
5505 FieldAccessCallingConventionARM64 calling_convention;
5506 codegen_->CreateUnresolvedFieldLocationSummary(
5507 instruction, instruction->GetFieldType(), calling_convention);
5508}
5509
5510void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
5511 HUnresolvedStaticFieldSet* instruction) {
5512 FieldAccessCallingConventionARM64 calling_convention;
5513 codegen_->GenerateUnresolvedFieldAccess(instruction,
5514 instruction->GetFieldType(),
5515 instruction->GetFieldIndex(),
5516 instruction->GetDexPc(),
5517 calling_convention);
5518}
5519
Alexandre Rames5319def2014-10-23 10:03:10 +01005520void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005521 LocationSummary* locations =
5522 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005523 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Alexandre Rames5319def2014-10-23 10:03:10 +01005524}
5525
5526void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005527 HBasicBlock* block = instruction->GetBlock();
5528 if (block->GetLoopInformation() != nullptr) {
5529 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5530 // The back edge will generate the suspend check.
5531 return;
5532 }
5533 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5534 // The goto will generate the suspend check.
5535 return;
5536 }
5537 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01005538}
5539
Alexandre Rames67555f72014-11-18 10:55:16 +00005540void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
5541 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005542 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005543 InvokeRuntimeCallingConvention calling_convention;
5544 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5545}
5546
5547void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005548 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08005549 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00005550}
5551
5552void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
5553 LocationSummary* locations =
5554 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
5555 Primitive::Type input_type = conversion->GetInputType();
5556 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00005557 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00005558 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
5559 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
5560 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
5561 }
5562
Alexandre Rames542361f2015-01-29 16:57:31 +00005563 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005564 locations->SetInAt(0, Location::RequiresFpuRegister());
5565 } else {
5566 locations->SetInAt(0, Location::RequiresRegister());
5567 }
5568
Alexandre Rames542361f2015-01-29 16:57:31 +00005569 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005570 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5571 } else {
5572 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5573 }
5574}
5575
5576void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
5577 Primitive::Type result_type = conversion->GetResultType();
5578 Primitive::Type input_type = conversion->GetInputType();
5579
5580 DCHECK_NE(input_type, result_type);
5581
Alexandre Rames542361f2015-01-29 16:57:31 +00005582 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005583 int result_size = Primitive::ComponentSize(result_type);
5584 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00005585 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005586 Register output = OutputRegister(conversion);
5587 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00005588 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01005589 // 'int' values are used directly as W registers, discarding the top
5590 // bits, so we don't need to sign-extend and can just perform a move.
5591 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
5592 // top 32 bits of the target register. We theoretically could leave those
5593 // bits unchanged, but we would have to make sure that no code uses a
5594 // 32bit input value as a 64bit value assuming that the top 32 bits are
5595 // zero.
5596 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00005597 } else if (result_type == Primitive::kPrimChar ||
5598 (input_type == Primitive::kPrimChar && input_size < result_size)) {
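      // Java `char` is an unsigned 16-bit type, so zero-extend by extracting the low 16 bits.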
5599 __ Ubfx(output,
5600 output.IsX() ? source.X() : source.W(),
5601 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005602 } else {
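      // Other integral conversions sign-extend from the low `min_size * kBitsPerByte` bits.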
Alexandre Rames3e69f162014-12-10 10:36:50 +00005603 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005604 }
Alexandre Rames542361f2015-01-29 16:57:31 +00005605 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005606 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00005607 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005608 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
5609 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00005610 } else if (Primitive::IsFloatingPointType(result_type) &&
5611 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005612 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
5613 } else {
5614 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
5615 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00005616 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00005617}
Alexandre Rames67555f72014-11-18 10:55:16 +00005618
Serban Constantinescu02164b32014-11-13 14:05:07 +00005619void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
5620 HandleShift(ushr);
5621}
5622
5623void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
5624 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00005625}
5626
5627void LocationsBuilderARM64::VisitXor(HXor* instruction) {
5628 HandleBinaryOp(instruction);
5629}
5630
5631void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
5632 HandleBinaryOp(instruction);
5633}
5634
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005635void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005636 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005637 LOG(FATAL) << "Unreachable";
5638}
5639
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005640void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005641 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005642 LOG(FATAL) << "Unreachable";
5643}
5644
Mark Mendellfe57faa2015-09-18 09:26:15 -04005645// Simple implementation of packed switch - generate cascaded compare/jumps.
5646void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5647 LocationSummary* locations =
5648 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
5649 locations->SetInAt(0, Location::RequiresRegister());
5650}
5651
5652void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5653 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08005654 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04005655 Register value_reg = InputRegisterAt(switch_instr, 0);
5656 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
5657
Zheng Xu3927c8b2015-11-18 17:46:25 +08005658 // Roughly set 16 as the maximum average number of assembly instructions generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005659 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08005660 // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the graph to
5661 // make sure we don't emit it if the target may run out of range.
5662 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
5663 // ranges and emit the tables only as required.
5664 static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
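 // With 4-byte A64 instructions this evaluates to 1 MB / (16 * 4) = 16384 HIRs.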
Mark Mendellfe57faa2015-09-18 09:26:15 -04005665
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005666 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08005667 // Current instruction id is an upper bound of the number of HIRs in the graph.
5668 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
5669 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005670 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5671 Register temp = temps.AcquireW();
5672 __ Subs(temp, value_reg, Operand(lower_bound));
5673
Zheng Xu3927c8b2015-11-18 17:46:25 +08005674 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005675 // Jump to successors[0] if value == lower_bound.
5676 __ B(eq, codegen_->GetLabelOf(successors[0]));
5677 int32_t last_index = 0;
5678 for (; num_entries - last_index > 2; last_index += 2) {
5679 __ Subs(temp, temp, Operand(2));
5680 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
5681 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
5682 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
5683 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
5684 }
5685 if (num_entries - last_index == 2) {
5686 // The last missing case_value.
5687 __ Cmp(temp, Operand(1));
5688 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08005689 }
5690
5691 // And the default for any other value.
5692 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
5693 __ B(codegen_->GetLabelOf(default_block));
5694 }
5695 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01005696 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08005697
5698 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5699
5700 // The instructions below should use at most one blocked register. Since there are two blocked
5701 // registers, we are free to block one.
5702 Register temp_w = temps.AcquireW();
5703 Register index;
5704 // Remove the bias.
5705 if (lower_bound != 0) {
5706 index = temp_w;
5707 __ Sub(index, value_reg, Operand(lower_bound));
5708 } else {
5709 index = value_reg;
5710 }
5711
5712 // Jump to default block if index is out of the range.
5713 __ Cmp(index, Operand(num_entries));
5714 __ B(hs, codegen_->GetLabelOf(default_block));
5715
5716 // In the current VIXL implementation, encoding the immediate value for Adr does not require
5717 // any blocked registers, so we are free to use both VIXL blocked registers to reduce the
5718 // register pressure.
5719 Register table_base = temps.AcquireX();
5720 // Load jump offset from the table.
5721 __ Adr(table_base, jump_table->GetTableStartLabel());
5722 Register jump_offset = temp_w;
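    // Each jump table entry is a 32-bit offset from table_base, so scale the index by 4 (UXTW #2).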
5723 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
5724
5725 // Jump to the target block by branching to table_base (PC-relative) + offset.
5726 Register target_address = table_base;
5727 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
5728 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04005729 }
5730}
5731
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005732void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
5733 HInstruction* instruction,
5734 Location out,
5735 uint32_t offset,
5736 Location maybe_temp,
5737 ReadBarrierOption read_barrier_option) {
Roland Levillain44015862016-01-22 11:47:17 +00005738 Primitive::Type type = Primitive::kPrimNot;
5739 Register out_reg = RegisterFrom(out, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005740 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005741 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005742 Register temp_reg = RegisterFrom(maybe_temp, type);
5743 if (kUseBakerReadBarrier) {
5744 // Load with fast path based Baker's read barrier.
5745 // /* HeapReference<Object> */ out = *(out + offset)
5746 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5747 out,
5748 out_reg,
5749 offset,
5750 temp_reg,
5751 /* needs_null_check */ false,
5752 /* use_load_acquire */ false);
5753 } else {
5754 // Load with slow path based read barrier.
5755 // Save the value of `out` into `maybe_temp` before overwriting it
5756 // in the following move operation, as we will need it for the
5757 // read barrier below.
5758 __ Mov(temp_reg, out_reg);
5759 // /* HeapReference<Object> */ out = *(out + offset)
5760 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5761 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5762 }
5763 } else {
5764 // Plain load with no read barrier.
5765 // /* HeapReference<Object> */ out = *(out + offset)
5766 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5767 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5768 }
5769}
5770
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005771void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
5772 HInstruction* instruction,
5773 Location out,
5774 Location obj,
5775 uint32_t offset,
5776 Location maybe_temp,
5777 ReadBarrierOption read_barrier_option) {
Roland Levillain44015862016-01-22 11:47:17 +00005778 Primitive::Type type = Primitive::kPrimNot;
5779 Register out_reg = RegisterFrom(out, type);
5780 Register obj_reg = RegisterFrom(obj, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005781 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005782 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005783 if (kUseBakerReadBarrier) {
5784 // Load with fast path based Baker's read barrier.
5785 Register temp_reg = RegisterFrom(maybe_temp, type);
5786 // /* HeapReference<Object> */ out = *(obj + offset)
5787 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5788 out,
5789 obj_reg,
5790 offset,
5791 temp_reg,
5792 /* needs_null_check */ false,
5793 /* use_load_acquire */ false);
5794 } else {
5795 // Load with slow path based read barrier.
5796 // /* HeapReference<Object> */ out = *(obj + offset)
5797 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5798 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5799 }
5800 } else {
5801 // Plain load with no read barrier.
5802 // /* HeapReference<Object> */ out = *(obj + offset)
5803 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5804 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5805 }
5806}
5807
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005808void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(
5809 HInstruction* instruction,
5810 Location root,
5811 Register obj,
5812 uint32_t offset,
5813 vixl::aarch64::Label* fixup_label,
5814 ReadBarrierOption read_barrier_option) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005815 DCHECK(fixup_label == nullptr || offset == 0u);
Roland Levillain44015862016-01-22 11:47:17 +00005816 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005817 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005818 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005819 if (kUseBakerReadBarrier) {
5820 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillainba650a42017-03-06 13:52:32 +00005821 // Baker's read barriers are used.
Roland Levillain44015862016-01-22 11:47:17 +00005822 //
Roland Levillainba650a42017-03-06 13:52:32 +00005823 // Note that we do not actually check the value of
5824 // `GetIsGcMarking()` to decide whether to mark the loaded GC
5825 // root or not. Instead, we load into `temp` the read barrier
5826 // mark entry point corresponding to register `root`. If `temp`
5827 // is null, it means that `GetIsGcMarking()` is false, and vice
5828 // versa.
5829 //
Mathieu Chartierfe814e82016-11-09 14:32:49 -08005830 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
Roland Levillainba650a42017-03-06 13:52:32 +00005831 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
5832 // if (temp != nullptr) { // <=> Thread::Current()->GetIsGcMarking()
5833 // // Slow path.
5834 // root = temp(root); // root = ReadBarrier::Mark(root); // Runtime entry point call.
Roland Levillain44015862016-01-22 11:47:17 +00005835 // }
5836
Roland Levillainba650a42017-03-06 13:52:32 +00005837 // Slow path marking the GC root `root`. The entrypoint will already be loaded in `temp`.
5838 Register temp = lr;
5839 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(
5840 instruction, root, /* entrypoint */ LocationFrom(temp));
5841 codegen_->AddSlowPath(slow_path);
5842
5843 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
5844 const int32_t entry_point_offset =
5845 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(root.reg());
5846 // Loading the entrypoint does not require a load acquire since it is only changed when
5847 // threads are suspended or running a checkpoint.
5848 __ Ldr(temp, MemOperand(tr, entry_point_offset));
5849
Roland Levillain44015862016-01-22 11:47:17 +00005850 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005851 if (fixup_label == nullptr) {
5852 __ Ldr(root_reg, MemOperand(obj, offset));
5853 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005854 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005855 }
Roland Levillain44015862016-01-22 11:47:17 +00005856 static_assert(
5857 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5858 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5859 "have different sizes.");
5860 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5861 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5862 "have different sizes.");
5863
Mathieu Chartierfe814e82016-11-09 14:32:49 -08005864 // The entrypoint is null when the GC is not marking; this prevents one load compared to
5865 // checking GetIsGcMarking.
Roland Levillain44015862016-01-22 11:47:17 +00005866 __ Cbnz(temp, slow_path->GetEntryLabel());
5867 __ Bind(slow_path->GetExitLabel());
5868 } else {
5869 // GC root loaded through a slow path for read barriers other
5870 // than Baker's.
5871 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005872 if (fixup_label == nullptr) {
5873 __ Add(root_reg.X(), obj.X(), offset);
5874 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005875 codegen_->EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005876 }
Roland Levillain44015862016-01-22 11:47:17 +00005877 // /* mirror::Object* */ root = root->Read()
5878 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5879 }
5880 } else {
5881 // Plain GC root load with no read barrier.
5882 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005883 if (fixup_label == nullptr) {
5884 __ Ldr(root_reg, MemOperand(obj, offset));
5885 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005886 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005887 }
Roland Levillain44015862016-01-22 11:47:17 +00005888 // Note that GC roots are not affected by heap poisoning, thus we
5889 // do not have to unpoison `root_reg` here.
5890 }
5891}
5892
5893void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5894 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005895 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005896 uint32_t offset,
5897 Register temp,
5898 bool needs_null_check,
5899 bool use_load_acquire) {
5900 DCHECK(kEmitCompilerReadBarrier);
5901 DCHECK(kUseBakerReadBarrier);
5902
5903 // /* HeapReference<Object> */ ref = *(obj + offset)
5904 Location no_index = Location::NoLocation();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005905 size_t no_scale_factor = 0u;
Roland Levillainbfea3352016-06-23 13:48:47 +01005906 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5907 ref,
5908 obj,
5909 offset,
5910 no_index,
5911 no_scale_factor,
5912 temp,
5913 needs_null_check,
5914 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005915}
5916
5917void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5918 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005919 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005920 uint32_t data_offset,
5921 Location index,
5922 Register temp,
5923 bool needs_null_check) {
5924 DCHECK(kEmitCompilerReadBarrier);
5925 DCHECK(kUseBakerReadBarrier);
5926
5927 // Array cells are never volatile variables, therefore array loads
5928 // never use Load-Acquire instructions on ARM64.
5929 const bool use_load_acquire = false;
5930
Roland Levillainbfea3352016-06-23 13:48:47 +01005931 static_assert(
5932 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5933 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005934 // /* HeapReference<Object> */ ref =
5935 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005936 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
5937 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5938 ref,
5939 obj,
5940 data_offset,
5941 index,
5942 scale_factor,
5943 temp,
5944 needs_null_check,
5945 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005946}
5947
5948void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5949 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005950 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005951 uint32_t offset,
5952 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01005953 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005954 Register temp,
5955 bool needs_null_check,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005956 bool use_load_acquire,
5957 bool always_update_field) {
Roland Levillain44015862016-01-22 11:47:17 +00005958 DCHECK(kEmitCompilerReadBarrier);
5959 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01005960 // If we are emitting an array load, we should not be using a
5961 // Load Acquire instruction. In other words:
5962 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5963 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005964
Roland Levillain54f869e2017-03-06 13:54:11 +00005965 // Query `art::Thread::Current()->GetIsGcMarking()` to decide
5966 // whether we need to enter the slow path to mark the reference.
5967 // Then, in the slow path, check the gray bit in the lock word of
5968 // the reference's holder (`obj`) to decide whether to mark `ref` or
5969 // not.
Roland Levillain44015862016-01-22 11:47:17 +00005970 //
Roland Levillainba650a42017-03-06 13:52:32 +00005971 // Note that we do not actually check the value of `GetIsGcMarking()`;
5972 // instead, we load into `temp2` the read barrier mark entry point
5973 // corresponding to register `ref`. If `temp2` is null, it means
5974 // that `GetIsGcMarking()` is false, and vice versa.
5975 //
5976 // temp2 = Thread::Current()->pReadBarrierMarkReg ## ref.reg()
Roland Levillainba650a42017-03-06 13:52:32 +00005977 // if (temp2 != nullptr) { // <=> Thread::Current()->GetIsGcMarking()
5978 // // Slow path.
Roland Levillain54f869e2017-03-06 13:54:11 +00005979 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
5980 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5981 // HeapReference<mirror::Object> ref = *src; // Original reference load.
5982 // bool is_gray = (rb_state == ReadBarrier::GrayState());
5983 // if (is_gray) {
5984 // ref = temp2(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
5985 // }
5986 // } else {
5987 // HeapReference<mirror::Object> ref = *src; // Original reference load.
Roland Levillain44015862016-01-22 11:47:17 +00005988 // }
Roland Levillain44015862016-01-22 11:47:17 +00005989
Roland Levillainba650a42017-03-06 13:52:32 +00005990 // Slow path marking the object `ref` when the GC is marking. The
5991 // entrypoint will already be loaded in `temp2`.
5992 Register temp2 = lr;
5993 Location temp2_loc = LocationFrom(temp2);
5994 SlowPathCodeARM64* slow_path;
5995 if (always_update_field) {
Roland Levillain54f869e2017-03-06 13:54:11 +00005996 // LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
5997 // only supports address of the form `obj + field_offset`, where
5998 // `obj` is a register and `field_offset` is a register. Thus
5999 // `offset` and `scale_factor` above are expected to be zero in
6000 // this code path.
Roland Levillainba650a42017-03-06 13:52:32 +00006001 DCHECK_EQ(offset, 0u);
6002 DCHECK_EQ(scale_factor, 0u); /* "times 1" */
Roland Levillain54f869e2017-03-06 13:54:11 +00006003 Location field_offset = index;
6004 slow_path =
6005 new (GetGraph()->GetArena()) LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
6006 instruction,
6007 ref,
6008 obj,
6009 offset,
6010 /* index */ field_offset,
6011 scale_factor,
6012 needs_null_check,
6013 use_load_acquire,
6014 temp,
6015 /* entrypoint */ temp2_loc);
Roland Levillainba650a42017-03-06 13:52:32 +00006016 } else {
Roland Levillain54f869e2017-03-06 13:54:11 +00006017 slow_path = new (GetGraph()->GetArena()) LoadReferenceWithBakerReadBarrierSlowPathARM64(
6018 instruction,
6019 ref,
6020 obj,
6021 offset,
6022 index,
6023 scale_factor,
6024 needs_null_check,
6025 use_load_acquire,
6026 temp,
6027 /* entrypoint */ temp2_loc);
Roland Levillainba650a42017-03-06 13:52:32 +00006028 }
6029 AddSlowPath(slow_path);
6030
6031 // temp2 = Thread::Current()->pReadBarrierMarkReg ## ref.reg()
6032 const int32_t entry_point_offset =
6033 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref.reg());
6034 // Loading the entrypoint does not require a load acquire since it is only changed when
6035 // threads are suspended or running a checkpoint.
6036 __ Ldr(temp2, MemOperand(tr, entry_point_offset));
Roland Levillainba650a42017-03-06 13:52:32 +00006037 // The entrypoint is null when the GC is not marking; this prevents one load compared to
6038 // checking GetIsGcMarking.
6039 __ Cbnz(temp2, slow_path->GetEntryLabel());
Roland Levillain54f869e2017-03-06 13:54:11 +00006040 // Fast path: just load the reference.
6041 GenerateRawReferenceLoad(
6042 instruction, ref, obj, offset, index, scale_factor, needs_null_check, use_load_acquire);
Roland Levillainba650a42017-03-06 13:52:32 +00006043 __ Bind(slow_path->GetExitLabel());
6044}
6045
6046void CodeGeneratorARM64::GenerateRawReferenceLoad(HInstruction* instruction,
6047 Location ref,
6048 Register obj,
6049 uint32_t offset,
6050 Location index,
6051 size_t scale_factor,
6052 bool needs_null_check,
6053 bool use_load_acquire) {
6054 DCHECK(obj.IsW());
Roland Levillain44015862016-01-22 11:47:17 +00006055 Primitive::Type type = Primitive::kPrimNot;
6056 Register ref_reg = RegisterFrom(ref, type);
Roland Levillain44015862016-01-22 11:47:17 +00006057
Roland Levillainba650a42017-03-06 13:52:32 +00006058 // If needed, vixl::EmissionCheckScope guards are used to ensure
6059 // that no pools are emitted between the load (macro) instruction
6060 // and MaybeRecordImplicitNullCheck.
Roland Levillain44015862016-01-22 11:47:17 +00006061
Roland Levillain44015862016-01-22 11:47:17 +00006062 if (index.IsValid()) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006063 // Load types involving an "index": ArrayGet,
6064 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6065 // intrinsics.
Roland Levillainbfea3352016-06-23 13:48:47 +01006066 if (use_load_acquire) {
6067 // UnsafeGetObjectVolatile intrinsic case.
6068 // Register `index` is not an index in an object array, but an
6069 // offset to an object reference field within object `obj`.
6070 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
6071 DCHECK(instruction->GetLocations()->Intrinsified());
6072 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
6073 << instruction->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006074 DCHECK_EQ(offset, 0u);
6075 DCHECK_EQ(scale_factor, 0u);
Roland Levillainba650a42017-03-06 13:52:32 +00006076 DCHECK_EQ(needs_null_check, false);
6077 // /* HeapReference<mirror::Object> */ ref = *(obj + index)
Roland Levillainbfea3352016-06-23 13:48:47 +01006078 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
6079 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00006080 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006081 // ArrayGet and UnsafeGetObject and UnsafeCASObject intrinsics cases.
6082 // /* HeapReference<mirror::Object> */ ref = *(obj + offset + (index << scale_factor))
Roland Levillainbfea3352016-06-23 13:48:47 +01006083 if (index.IsConstant()) {
6084 uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
Roland Levillainba650a42017-03-06 13:52:32 +00006085 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillainbfea3352016-06-23 13:48:47 +01006086 Load(type, ref_reg, HeapOperand(obj, computed_offset));
Roland Levillainba650a42017-03-06 13:52:32 +00006087 if (needs_null_check) {
6088 MaybeRecordImplicitNullCheck(instruction);
6089 }
Roland Levillainbfea3352016-06-23 13:48:47 +01006090 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006091 UseScratchRegisterScope temps(GetVIXLAssembler());
6092 Register temp = temps.AcquireW();
6093 __ Add(temp, obj, offset);
6094 {
6095 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
6096 Load(type, ref_reg, HeapOperand(temp, XRegisterFrom(index), LSL, scale_factor));
6097 if (needs_null_check) {
6098 MaybeRecordImplicitNullCheck(instruction);
6099 }
6100 }
Roland Levillainbfea3352016-06-23 13:48:47 +01006101 }
Roland Levillain44015862016-01-22 11:47:17 +00006102 }
Roland Levillain44015862016-01-22 11:47:17 +00006103 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006104 // /* HeapReference<mirror::Object> */ ref = *(obj + offset)
Roland Levillain44015862016-01-22 11:47:17 +00006105 MemOperand field = HeapOperand(obj, offset);
6106 if (use_load_acquire) {
Roland Levillainba650a42017-03-06 13:52:32 +00006107 // Implicit null checks are handled by CodeGeneratorARM64::LoadAcquire.
6108 LoadAcquire(instruction, ref_reg, field, needs_null_check);
Roland Levillain44015862016-01-22 11:47:17 +00006109 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006110 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain44015862016-01-22 11:47:17 +00006111 Load(type, ref_reg, field);
Roland Levillainba650a42017-03-06 13:52:32 +00006112 if (needs_null_check) {
6113 MaybeRecordImplicitNullCheck(instruction);
6114 }
Roland Levillain44015862016-01-22 11:47:17 +00006115 }
6116 }
6117
6118 // Object* ref = ref_addr->AsMirrorPtr()
6119 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
Roland Levillain44015862016-01-22 11:47:17 +00006120}
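
// Illustrative sketch (exposition only; names and integer types are made up):
// the effective addresses GenerateRawReferenceLoad computes for its two shapes.
static inline uint64_t SketchRawReferenceLoadAddress(uint64_t obj,
                                                     uint32_t offset,
                                                     uint64_t index,
                                                     size_t scale_factor,
                                                     bool has_index) {
  // With an index (ArrayGet and the Unsafe* intrinsics):
  //   ref = *(obj + offset + (index << scale_factor))
  // Without an index (plain field loads):
  //   ref = *(obj + offset)
  return has_index ? obj + offset + (index << scale_factor) : obj + offset;
}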
6121
6122void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
6123 Location out,
6124 Location ref,
6125 Location obj,
6126 uint32_t offset,
6127 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006128 DCHECK(kEmitCompilerReadBarrier);
6129
Roland Levillain44015862016-01-22 11:47:17 +00006130 // Insert a slow path based read barrier *after* the reference load.
6131 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006132 // If heap poisoning is enabled, the unpoisoning of the loaded
6133 // reference will be carried out by the runtime within the slow
6134 // path.
6135 //
6136 // Note that `ref` currently does not get unpoisoned (when heap
6137 // poisoning is enabled), which is alright as the `ref` argument is
6138 // not used by the artReadBarrierSlow entry point.
6139 //
6140 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6141 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
6142 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
6143 AddSlowPath(slow_path);
6144
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006145 __ B(slow_path->GetEntryLabel());
6146 __ Bind(slow_path->GetExitLabel());
6147}
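
// Illustrative sketch (exposition only; the call is abridged, see the
// ReadBarrierForHeapReferenceSlowPathARM64 class for the real call setup): the
// unconditional branch above makes every such load behave roughly as
//
//   out = artReadBarrierSlow(ref, obj, offset);  // offset recomputed from `index` when valid
//
// after which execution resumes at the exit label.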
6148
Roland Levillain44015862016-01-22 11:47:17 +00006149void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6150 Location out,
6151 Location ref,
6152 Location obj,
6153 uint32_t offset,
6154 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006155 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00006156 // Baker's read barriers shall be handled by the fast path
6157 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
6158 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006159 // If heap poisoning is enabled, unpoisoning will be taken care of
6160 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00006161 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006162 } else if (kPoisonHeapReferences) {
6163 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
6164 }
6165}
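
// Illustrative sketch (exposition only; the enum and helper are made up): the
// post-load action selected by the build configuration, combining the branches
// above with the Baker fast path handled elsewhere.
enum class SketchRefLoadBarrierKind { kNone, kUnpoisonOnly, kBakerFastPath, kSlowPathCall };
static inline SketchRefLoadBarrierKind SketchSelectRefLoadBarrierKind() {
  if (kEmitCompilerReadBarrier) {
    // Baker barriers are emitted on the fast path and never reach
    // MaybeGenerateReadBarrierSlow (hence the DCHECK above).
    return kUseBakerReadBarrier ? SketchRefLoadBarrierKind::kBakerFastPath
                                : SketchRefLoadBarrierKind::kSlowPathCall;
  }
  // Without read barriers, only heap reference poisoning may need handling.
  return kPoisonHeapReferences ? SketchRefLoadBarrierKind::kUnpoisonOnly
                               : SketchRefLoadBarrierKind::kNone;
}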
6166
Roland Levillain44015862016-01-22 11:47:17 +00006167void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6168 Location out,
6169 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006170 DCHECK(kEmitCompilerReadBarrier);
6171
Roland Levillain44015862016-01-22 11:47:17 +00006172 // Insert a slow path based read barrier *after* the GC root load.
6173 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006174 // Note that GC roots are not affected by heap poisoning, so we do
6175 // not need to do anything special for this here.
6176 SlowPathCodeARM64* slow_path =
6177 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
6178 AddSlowPath(slow_path);
6179
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006180 __ B(slow_path->GetEntryLabel());
6181 __ Bind(slow_path->GetExitLabel());
6182}
6183
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006184void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
6185 LocationSummary* locations =
6186 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6187 locations->SetInAt(0, Location::RequiresRegister());
6188 locations->SetOut(Location::RequiresRegister());
6189}
6190
6191void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
6192 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00006193 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006194 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006195 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006196 __ Ldr(XRegisterFrom(locations->Out()),
6197 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006198 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006199 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00006200 instruction->GetIndex(), kArm64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006201 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
6202 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006203 __ Ldr(XRegisterFrom(locations->Out()),
6204 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006205 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006206}
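
// Illustrative sketch (exposition only; raw pointer arithmetic stands in for
// the emitted Ldr instructions): a vtable get is a single load from the class
// object, while an IMT get first loads the ImTable pointer, then the entry.
static inline const void* SketchClassTableGet(const char* klass,
                                              uint32_t entry_offset,
                                              uint32_t imt_ptr_offset,
                                              bool is_vtable) {
  if (is_vtable) {
    // method = *(klass + embedded_vtable_entry_offset)
    return *reinterpret_cast<void* const*>(klass + entry_offset);
  }
  // imt = *(klass + imt_ptr_offset); method = *(imt + imt_entry_offset)
  const char* imt = *reinterpret_cast<const char* const*>(klass + imt_ptr_offset);
  return *reinterpret_cast<void* const*>(imt + entry_offset);
}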
6207
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006208static void PatchJitRootUse(uint8_t* code,
6209 const uint8_t* roots_data,
6210 vixl::aarch64::Literal<uint32_t>* literal,
6211 uint64_t index_in_table) {
6212 uint32_t literal_offset = literal->GetOffset();
6213 uintptr_t address =
6214 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
6215 uint8_t* data = code + literal_offset;
6216 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
6217}
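
// Illustrative sketch (exposition only; the helper is made up): the value
// written into the patched literal is the address of the GcRoot slot in the
// JIT roots table, not the object pointer itself; the root is loaded through
// that slot at run time.
static inline uint32_t SketchJitRootSlotAddress(const uint8_t* roots_data,
                                                uint64_t index_in_table) {
  return dchecked_integral_cast<uint32_t>(
      reinterpret_cast<uintptr_t>(roots_data) +
      index_in_table * sizeof(GcRoot<mirror::Object>));
}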
6218
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006219void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
6220 for (const auto& entry : jit_string_patches_) {
6221 const auto& it = jit_string_roots_.find(entry.first);
6222 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006223 PatchJitRootUse(code, roots_data, entry.second, it->second);
6224 }
6225 for (const auto& entry : jit_class_patches_) {
6226 const auto& it = jit_class_roots_.find(entry.first);
6227 DCHECK(it != jit_class_roots_.end());
6228 PatchJitRootUse(code, roots_data, entry.second, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006229 }
6230}
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006231
Alexandre Rames67555f72014-11-18 10:55:16 +00006232#undef __
6233#undef QUICK_ENTRY_POINT
6234
Alexandre Rames5319def2014-10-23 10:03:10 +01006235} // namespace arm64
6236} // namespace art