/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
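// As a rough illustration of the tradeoff above: at the threshold itself (num_entries == 7), the
// compare/jump sequence costs about 1.5 * 7 + 3 ~= 13-14 instructions, while the jump table costs
// 7 instructions plus 7 32-bit literals, so the two forms are of comparable size; below the
// threshold the compare/jump sequence is the smaller of the two.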

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
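// For instance, in the gt_bias case of kCondLT above (a cmpg-style comparison, where a NaN
// operand is treated as "greater"), an unordered FCMP result sets NZCV to 0011, so the plain
// `lt` condition (N != V) would be taken on NaN; `cc`/`lo` (C clear) is used instead, so that
// the "<" branch is not taken when the comparison is unordered.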

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate the memory operand used to save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the base address for the floating point register spills).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit,
                         vixl::aarch64::Register bss_entry_temp = vixl::aarch64::Register(),
                         vixl::aarch64::Label* bss_entry_adrp_label = nullptr)
      : SlowPathCodeARM64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_entry_temp_(bss_entry_temp),
        bss_entry_adrp_label_(bss_entry_adrp_label) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    constexpr bool call_saves_everything_except_r0_ip0 = (!kUseReadBarrier || kUseBakerReadBarrier);
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the page address of
    // the entry which is in a scratch register. Make sure it's not used for saving/restoring
    // registers. Exclude the scratch register also for non-Baker read barrier for simplicity.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    if (is_load_class_bss_entry) {
      // This temp is a scratch register.
      DCHECK(bss_entry_temp_.IsValid());
      temps.Exclude(bss_entry_temp_);
    }

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    if (is_load_class_bss_entry) {
      DCHECK(out.IsValid());
      const DexFile& dex_file = cls_->GetDexFile();
      if (call_saves_everything_except_r0_ip0) {
        // The class entry page address was preserved in bss_entry_temp_ thanks to kSaveEverything.
      } else {
        // For non-Baker read barrier, we need to re-calculate the address of the class entry page.
        bss_entry_adrp_label_ = arm64_codegen->NewBssEntryTypePatch(dex_file, type_index);
        arm64_codegen->EmitAdrpPlaceholder(bss_entry_adrp_label_, bss_entry_temp_);
      }
      vixl::aarch64::Label* strp_label =
          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index, bss_entry_adrp_label_);
      {
        SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
        __ Bind(strp_label);
        __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
               MemOperand(bss_entry_temp_, /* offset placeholder */ 0));
      }
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // For HLoadClass/kBssEntry, the temp register and the label of the ADRP where it was loaded.
  vixl::aarch64::Register bss_entry_temp_;
  vixl::aarch64::Label* bss_entry_adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadStringSlowPathARM64(HLoadString* instruction, Register temp, vixl::aarch64::Label* adrp_label)
      : SlowPathCodeARM64(instruction),
        temp_(temp),
        adrp_label_(adrp_label) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // temp_ is a scratch register. Make sure it's not used for saving/restoring registers.
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    temps.Exclude(temp_);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    const DexFile& dex_file = instruction_->AsLoadString()->GetDexFile();
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // The string entry page address was preserved in temp_ thanks to kSaveEverything.
    } else {
      // For non-Baker read barrier, we need to re-calculate the address of the string entry page.
      adrp_label_ = arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index);
      arm64_codegen->EmitAdrpPlaceholder(adrp_label_, temp_);
    }
    vixl::aarch64::Label* strp_label =
        arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label_);
    {
      SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
      __ Bind(strp_label);
      __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
             MemOperand(temp_, /* offset placeholder */ 0));
    }

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  const Register temp_;
  vixl::aarch64::Label* adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer space and that we have generated the jump table with the right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathARM64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction,
                               Location ref,
                               Location entrypoint = Location::NoLocation())
      : SlowPathCodeARM64(instruction),
        ref_(ref),
        entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathARM64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathARM64(HInstruction* instruction,
                                             Location ref,
                                             Register obj,
                                             Location field_offset,
                                             Register temp)
      : SlowPathCodeARM64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = WRegisterFrom(ref_);
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use IP to save the old reference, as IP is
    // used internally by the ReadBarrierMarkRegX entry point, and we
    // need the old reference after the call to that entry point.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    __ Mov(temp_.W(), ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LDXR/CMP/BNE sequence of instructions in the compare-and-set
    // (CAS) operation below would abort the CAS, leaving the field
    // as-is.
    vixl::aarch64::Label done;
    __ Cmp(temp_.W(), ref_reg);
    __ B(eq, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates the field before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);

    // Convenience aliases.
    Register base = obj_.W();
    Register offset = XRegisterFrom(field_offset_);
    Register expected = temp_.W();
    Register value = ref_reg;
    Register tmp_ptr = temps.AcquireX();    // Pointer to actual memory.
    Register tmp_value = temps.AcquireW();  // Value in memory.

    __ Add(tmp_ptr, base.X(), Operand(offset));

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not poison `value`, as it is the same register as
        // `expected`, which has just been poisoned.
      } else {
        arm64_codegen->GetAssembler()->PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp_value = [tmp_ptr] - expected;
    // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));

    vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&comparison_failed, ne);
    __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_value, &loop_head);
    __ B(&exit_loop);
    __ Bind(&comparison_failed);
    __ Clrex();
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not unpoison `value`, as it is the same register as
        // `expected`, which has just been unpoisoned.
      } else {
        arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathARM64);
};
901
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000902// Slow path generating a read barrier for a heap reference.
903class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
904 public:
905 ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
906 Location out,
907 Location ref,
908 Location obj,
909 uint32_t offset,
910 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000911 : SlowPathCodeARM64(instruction),
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000912 out_(out),
913 ref_(ref),
914 obj_(obj),
915 offset_(offset),
916 index_(index) {
917 DCHECK(kEmitCompilerReadBarrier);
918 // If `obj` is equal to `out` or `ref`, it means the initial object
919 // has been overwritten by (or after) the heap object reference load
920 // to be instrumented, e.g.:
921 //
922 // __ Ldr(out, HeapOperand(out, class_offset);
Roland Levillain44015862016-01-22 11:47:17 +0000923 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000924 //
925 // In that case, we have lost the information about the original
926 // object, and the emitted read barrier cannot work properly.
927 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
928 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
929 }
930
931 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
932 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
933 LocationSummary* locations = instruction_->GetLocations();
934 Primitive::Type type = Primitive::kPrimNot;
935 DCHECK(locations->CanCall());
936 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain3d312422016-06-23 13:53:42 +0100937 DCHECK(instruction_->IsInstanceFieldGet() ||
938 instruction_->IsStaticFieldGet() ||
939 instruction_->IsArrayGet() ||
940 instruction_->IsInstanceOf() ||
941 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100942 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain44015862016-01-22 11:47:17 +0000943 << "Unexpected instruction in read barrier for heap reference slow path: "
944 << instruction_->DebugName();
Roland Levillain19c54192016-11-04 13:44:09 +0000945 // The read barrier instrumentation of object ArrayGet
946 // instructions does not support the HIntermediateAddress
947 // instruction.
Roland Levillaincd3d0fb2016-01-15 19:26:48 +0000948 DCHECK(!(instruction_->IsArrayGet() &&
Artem Serov328429f2016-07-06 16:23:04 +0100949 instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000950
951 __ Bind(GetEntryLabel());
952
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000953 SaveLiveRegisters(codegen, locations);
954
955 // We may have to change the index's value, but as `index_` is a
956 // constant member (like other "inputs" of this slow path),
957 // introduce a copy of it, `index`.
958 Location index = index_;
959 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100960 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000961 if (instruction_->IsArrayGet()) {
962 // Compute the actual memory offset and store it in `index`.
963 Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
964 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
965 if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
966 // We are about to change the value of `index_reg` (see the
967 // calls to vixl::MacroAssembler::Lsl and
968 // vixl::MacroAssembler::Mov below), but it has
969 // not been saved by the previous call to
970 // art::SlowPathCode::SaveLiveRegisters, as it is a
971 // callee-save register --
972 // art::SlowPathCode::SaveLiveRegisters does not consider
973 // callee-save registers, as it has been designed with the
974 // assumption that callee-save registers are supposed to be
975 // handled by the called function. So, as a callee-save
976 // register, `index_reg` _would_ eventually be saved onto
977 // the stack, but it would be too late: we would have
978 // changed its value earlier. Therefore, we manually save
979 // it here into another freely available register,
980 // `free_reg`, chosen of course among the caller-save
981 // registers (as a callee-save `free_reg` register would
982 // exhibit the same problem).
983 //
984 // Note we could have requested a temporary register from
985 // the register allocator instead; but we prefer not to, as
986 // this is a slow path, and we know we can find a
987 // caller-save register that is available.
988 Register free_reg = FindAvailableCallerSaveRegister(codegen);
989 __ Mov(free_reg.W(), index_reg);
990 index_reg = free_reg;
991 index = LocationFrom(index_reg);
992 } else {
993 // The initial register stored in `index_` has already been
994 // saved in the call to art::SlowPathCode::SaveLiveRegisters
995 // (as it is not a callee-save register), so we can freely
996 // use it.
997 }
998 // Shifting the index value contained in `index_reg` by the scale
999 // factor (2) cannot overflow in practice, as the runtime is
1000 // unable to allocate object arrays with a size larger than
1001 // 2^26 - 1 (that is, 2^28 - 4 bytes).
1002 __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
1003 static_assert(
1004 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
1005 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
1006 __ Add(index_reg, index_reg, Operand(offset_));
1007 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001008 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
1009 // intrinsics, `index_` is not shifted by a scale factor of 2
1010 // (as in the case of ArrayGet), as it is actually an offset
1011 // to an object field within an object.
1012 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001013 DCHECK(instruction_->GetLocations()->Intrinsified());
1014 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
1015 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
1016 << instruction_->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001017 DCHECK_EQ(offset_, 0u);
Roland Levillaina7426c62016-08-03 15:02:10 +01001018 DCHECK(index_.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001019 }
1020 }
1021
1022 // We're moving two or three locations to locations that could
1023 // overlap, so we need a parallel move resolver.
1024 InvokeRuntimeCallingConvention calling_convention;
1025 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
1026 parallel_move.AddMove(ref_,
1027 LocationFrom(calling_convention.GetRegisterAt(0)),
1028 type,
1029 nullptr);
1030 parallel_move.AddMove(obj_,
1031 LocationFrom(calling_convention.GetRegisterAt(1)),
1032 type,
1033 nullptr);
1034 if (index.IsValid()) {
1035 parallel_move.AddMove(index,
1036 LocationFrom(calling_convention.GetRegisterAt(2)),
1037 Primitive::kPrimInt,
1038 nullptr);
1039 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1040 } else {
1041 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1042 arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
1043 }
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001044 arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001045 instruction_,
1046 instruction_->GetDexPc(),
1047 this);
1048 CheckEntrypointTypes<
1049 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
1050 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1051
1052 RestoreLiveRegisters(codegen, locations);
1053
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001054 __ B(GetExitLabel());
1055 }
1056
1057 const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }
1058
1059 private:
1060 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001061 size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
1062 size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001063 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
1064 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
1065 return Register(VIXLRegCodeFromART(i), kXRegSize);
1066 }
1067 }
1068 // We shall never fail to find a free caller-save register, as
1069 // there are more than two core caller-save registers on ARM64
1070 // (meaning it is possible to find one which is different from
1071 // `ref` and `obj`).
1072 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
1073 LOG(FATAL) << "Could not find a free register";
1074 UNREACHABLE();
1075 }
1076
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001077 const Location out_;
1078 const Location ref_;
1079 const Location obj_;
1080 const uint32_t offset_;
1081 // An additional location containing an index to an array.
1082 // Only used for HArrayGet and the UnsafeGetObject &
1083 // UnsafeGetObjectVolatile intrinsics.
1084 const Location index_;
1085
1086 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
1087};
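// Illustrative note (not part of the original code): for an HArrayGet on an
// object array, the slow path above rebuilds the full field offset expected by
// the runtime entry point, roughly
//
//   index = (index_reg << Primitive::ComponentSizeShift(kPrimNot)) + offset_;
//
// i.e. index * sizeof(HeapReference<Object>) plus the array data offset
// carried in `offset_`. Since object arrays are capped below 2^26 elements,
// the shifted value plus the data offset always fits in 32 bits, which is why
// the Lsl/Add pair above cannot overflow.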
1088
1089// Slow path generating a read barrier for a GC root.
1090class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
1091 public:
1092 ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +00001093 : SlowPathCodeARM64(instruction), out_(out), root_(root) {
Roland Levillain44015862016-01-22 11:47:17 +00001094 DCHECK(kEmitCompilerReadBarrier);
1095 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001096
1097 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1098 LocationSummary* locations = instruction_->GetLocations();
1099 Primitive::Type type = Primitive::kPrimNot;
1100 DCHECK(locations->CanCall());
1101 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain44015862016-01-22 11:47:17 +00001102 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
1103 << "Unexpected instruction in read barrier for GC root slow path: "
1104 << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001105
1106 __ Bind(GetEntryLabel());
1107 SaveLiveRegisters(codegen, locations);
1108
1109 InvokeRuntimeCallingConvention calling_convention;
1110 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
 1111 // The argument of the ReadBarrierForRootSlow entry point is not a managed
1112 // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
1113 // thus we need a 64-bit move here, and we cannot use
1114 //
1115 // arm64_codegen->MoveLocation(
1116 // LocationFrom(calling_convention.GetRegisterAt(0)),
1117 // root_,
1118 // type);
1119 //
1120 // which would emit a 32-bit move, as `type` is a (32-bit wide)
1121 // reference type (`Primitive::kPrimNot`).
1122 __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001123 arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001124 instruction_,
1125 instruction_->GetDexPc(),
1126 this);
1127 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1128 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1129
1130 RestoreLiveRegisters(codegen, locations);
1131 __ B(GetExitLabel());
1132 }
1133
1134 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }
1135
1136 private:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001137 const Location out_;
1138 const Location root_;
1139
1140 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
1141};
1142
Alexandre Rames5319def2014-10-23 10:03:10 +01001143#undef __
1144
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001145Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001146 Location next_location;
1147 if (type == Primitive::kPrimVoid) {
1148 LOG(FATAL) << "Unreachable type " << type;
1149 }
1150
Alexandre Rames542361f2015-01-29 16:57:31 +00001151 if (Primitive::IsFloatingPointType(type) &&
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001152 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
1153 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
Alexandre Rames542361f2015-01-29 16:57:31 +00001154 } else if (!Primitive::IsFloatingPointType(type) &&
1155 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001156 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
1157 } else {
1158 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Alexandre Rames542361f2015-01-29 16:57:31 +00001159 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
1160 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +01001161 }
1162
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001163 // Space on the stack is reserved for all arguments.
Alexandre Rames542361f2015-01-29 16:57:31 +00001164 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +01001165 return next_location;
1166}
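// Example (a sketch, assuming the standard ARM64 managed calling convention
// where x0 carries the ArtMethod* and x1-x7 / d0-d7 carry the arguments): for
// a method taking (int, double, long, float), successive GetNextLocation()
// calls would return roughly w1, d0, x2 and s1, while stack_index_ advances by
// 1, 2, 2 and 1 so that any argument that overflows the registers is assigned
// the stack slot reserved for it by GetStackOffsetOf().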
1167
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001168Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +01001169 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001170}
1171
Serban Constantinescu579885a2015-02-22 20:51:33 +00001172CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
1173 const Arm64InstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +01001174 const CompilerOptions& compiler_options,
1175 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +01001176 : CodeGenerator(graph,
1177 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001178 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +00001179 kNumberOfAllocatableRegisterPairs,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001180 callee_saved_core_registers.GetList(),
1181 callee_saved_fp_registers.GetList(),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001182 compiler_options,
1183 stats),
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001184 block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Zheng Xu3927c8b2015-11-18 17:46:25 +08001185 jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +01001186 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +00001187 instruction_visitor_(graph, this),
Serban Constantinescu579885a2015-02-22 20:51:33 +00001188 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001189 assembler_(graph->GetArena()),
Vladimir Marko58155012015-08-19 12:49:41 +00001190 isa_features_(isa_features),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001191 uint32_literals_(std::less<uint32_t>(),
1192 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +01001193 uint64_literals_(std::less<uint64_t>(),
1194 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001195 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1196 boot_image_string_patches_(StringReferenceValueComparator(),
1197 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1198 pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001199 boot_image_type_patches_(TypeReferenceValueComparator(),
1200 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1201 pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko1998cd02017-01-13 13:02:58 +00001202 type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001203 boot_image_address_patches_(std::less<uint32_t>(),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00001204 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1205 jit_string_patches_(StringReferenceValueComparator(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00001206 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1207 jit_class_patches_(TypeReferenceValueComparator(),
1208 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001209 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001210 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001211}
Alexandre Rames5319def2014-10-23 10:03:10 +01001212
Alexandre Rames67555f72014-11-18 10:55:16 +00001213#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +01001214
Zheng Xu3927c8b2015-11-18 17:46:25 +08001215void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001216 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001217 jump_table->EmitTable(this);
1218 }
1219}
1220
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001221void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001222 EmitJumpTables();
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001223 // Ensure we emit the literal pool.
1224 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +00001225
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001226 CodeGenerator::Finalize(allocator);
1227}
1228
Zheng Xuad4450e2015-04-17 18:48:56 +08001229void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1230 // Note: There are 6 kinds of moves:
1231 // 1. constant -> GPR/FPR (non-cycle)
1232 // 2. constant -> stack (non-cycle)
1233 // 3. GPR/FPR -> GPR/FPR
1234 // 4. GPR/FPR -> stack
1235 // 5. stack -> GPR/FPR
1236 // 6. stack -> stack (non-cycle)
 1237 // Cases 1, 2 and 6 should never be part of a dependency cycle on ARM64. For cases 3, 4 and 5,
 1238 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there are no intersecting
 1239 // cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to resolve the
 1240 // dependency.
1241 vixl_temps_.Open(GetVIXLAssembler());
1242}
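// Illustrative note (not part of the original code): the canonical dependency
// cycle the resolver has to break is a swap such as {w0 -> w1, w1 -> w0}. With
// a single scratch GPR acquired from the VIXL pool (e.g. ip0/w16) this
// resolves to roughly
//
//   mov w16, w0   // save one end of the cycle in the scratch register
//   mov w0, w1
//   mov w1, w16
//
// which is why the single GPR and FPR temps mentioned above suffice for the
// cycle cases (3, 4 and 5).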
1243
1244void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1245 vixl_temps_.Close();
1246}
1247
1248Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
1249 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
1250 kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
1251 kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
1252 Location scratch = GetScratchLocation(kind);
1253 if (!scratch.Equals(Location::NoLocation())) {
1254 return scratch;
1255 }
1256 // Allocate from VIXL temp registers.
1257 if (kind == Location::kRegister) {
1258 scratch = LocationFrom(vixl_temps_.AcquireX());
1259 } else {
1260 DCHECK(kind == Location::kFpuRegister);
1261 scratch = LocationFrom(vixl_temps_.AcquireD());
1262 }
1263 AddScratchLocation(scratch);
1264 return scratch;
1265}
1266
1267void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1268 if (loc.IsRegister()) {
1269 vixl_temps_.Release(XRegisterFrom(loc));
1270 } else {
1271 DCHECK(loc.IsFpuRegister());
1272 vixl_temps_.Release(DRegisterFrom(loc));
1273 }
1274 RemoveScratchLocation(loc);
1275}
1276
Alexandre Rames3e69f162014-12-10 10:36:50 +00001277void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001278 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +01001279 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001280}
1281
Alexandre Rames5319def2014-10-23 10:03:10 +01001282void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001283 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001284 __ Bind(&frame_entry_label_);
1285
Serban Constantinescu02164b32014-11-13 14:05:07 +00001286 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1287 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001288 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001289 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001290 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001291 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Artem Serov914d7a82017-02-07 14:33:49 +00001292 {
1293 // Ensure that between load and RecordPcInfo there are no pools emitted.
1294 ExactAssemblyScope eas(GetVIXLAssembler(),
1295 kInstructionSize,
1296 CodeBufferCheckScope::kExactSize);
1297 __ ldr(wzr, MemOperand(temp, 0));
1298 RecordPcInfo(nullptr, 0);
1299 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001300 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001301
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001302 if (!HasEmptyFrame()) {
1303 int frame_size = GetFrameSize();
1304 // Stack layout:
1305 // sp[frame_size - 8] : lr.
1306 // ... : other preserved core registers.
1307 // ... : other preserved fp registers.
1308 // ... : reserved frame space.
1309 // sp[0] : current method.
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001310
1311 // Save the current method if we need it. Note that we do not
1312 // do this in HCurrentMethod, as the instruction might have been removed
1313 // in the SSA graph.
1314 if (RequiresCurrentMethod()) {
1315 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001316 } else {
1317 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001318 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001319 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001320 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1321 frame_size - GetCoreSpillSize());
1322 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1323 frame_size - FrameEntrySpillSize());
Mingyao Yang063fc772016-08-02 11:02:54 -07001324
1325 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1326 // Initialize should_deoptimize flag to 0.
1327 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1328 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1329 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001330 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001331}
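// Illustrative example (hypothetical numbers, assuming kArtMethodRegister is
// x0): for frame_size = 64 with {x20, lr} as the preserved core registers and
// no preserved FP registers, the entry sequence above emits roughly
//
//   str x0, [sp, #-64]!     // store the ArtMethod*, pre-decrementing sp
//   stp x20, lr, [sp, #48]  // spill preserved core registers at the frame top
//
// which matches the layout comment: lr ends up at sp[frame_size - 8] and the
// current method at sp[0].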
1332
1333void CodeGeneratorARM64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001334 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001335 if (!HasEmptyFrame()) {
1336 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001337 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1338 frame_size - FrameEntrySpillSize());
1339 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1340 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001341 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001342 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001343 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001344 __ Ret();
1345 GetAssembler()->cfi().RestoreState();
1346 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001347}
1348
Scott Wakeling97c72b72016-06-24 16:19:36 +01001349CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001350 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001351 return CPURegList(CPURegister::kRegister, kXRegSize,
1352 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001353}
1354
Scott Wakeling97c72b72016-06-24 16:19:36 +01001355CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001356 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1357 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001358 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1359 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001360}
1361
Alexandre Rames5319def2014-10-23 10:03:10 +01001362void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1363 __ Bind(GetLabelOf(block));
1364}
1365
Calin Juravle175dc732015-08-25 15:42:32 +01001366void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1367 DCHECK(location.IsRegister());
1368 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1369}
1370
Calin Juravlee460d1d2015-09-29 04:52:17 +01001371void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1372 if (location.IsRegister()) {
1373 locations->AddTemp(location);
1374 } else {
1375 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1376 }
1377}
1378
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001379void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001380 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001381 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001382 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001383 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001384 if (value_can_be_null) {
1385 __ Cbz(value, &done);
1386 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001387 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001388 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001389 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001390 if (value_can_be_null) {
1391 __ Bind(&done);
1392 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001393}
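// Sketch of the card marking above (illustrative, relying on the usual ART
// card table scheme): `card` holds the biased card table base loaded from the
// Thread and `temp` holds (object >> kCardShift), so the Strb performs roughly
//
//   *(card_table_base + (object >> kCardShift)) = LSB(card_table_base);
//
// The base is biased so that its least significant byte equals the dirty-card
// value, letting a single register serve as both the address base and the
// value being stored.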
1394
David Brazdil58282f42016-01-14 12:45:10 +00001395void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001396 // Blocked core registers:
1397 // lr : Runtime reserved.
1398 // tr : Runtime reserved.
1399 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1400 // ip1 : VIXL core temp.
1401 // ip0 : VIXL core temp.
1402 //
1403 // Blocked fp registers:
1404 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001405 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1406 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001407 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001408 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001409 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001410
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001411 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001412 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001413 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001414 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001415
David Brazdil58282f42016-01-14 12:45:10 +00001416 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001417 // Stubs do not save callee-save floating point registers. If the graph
1418 // is debuggable, we need to deal with these registers differently. For
1419 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001420 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1421 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001422 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001423 }
1424 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001425}
1426
Alexandre Rames3e69f162014-12-10 10:36:50 +00001427size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1428 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1429 __ Str(reg, MemOperand(sp, stack_index));
1430 return kArm64WordSize;
1431}
1432
1433size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1434 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1435 __ Ldr(reg, MemOperand(sp, stack_index));
1436 return kArm64WordSize;
1437}
1438
1439size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1440 FPRegister reg = FPRegister(reg_id, kDRegSize);
1441 __ Str(reg, MemOperand(sp, stack_index));
1442 return kArm64WordSize;
1443}
1444
1445size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1446 FPRegister reg = FPRegister(reg_id, kDRegSize);
1447 __ Ldr(reg, MemOperand(sp, stack_index));
1448 return kArm64WordSize;
1449}
1450
Alexandre Rames5319def2014-10-23 10:03:10 +01001451void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001452 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001453}
1454
1455void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001456 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001457}
1458
Alexandre Rames67555f72014-11-18 10:55:16 +00001459void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001460 if (constant->IsIntConstant()) {
1461 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1462 } else if (constant->IsLongConstant()) {
1463 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1464 } else if (constant->IsNullConstant()) {
1465 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001466 } else if (constant->IsFloatConstant()) {
1467 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1468 } else {
1469 DCHECK(constant->IsDoubleConstant());
1470 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1471 }
1472}
1473
Alexandre Rames3e69f162014-12-10 10:36:50 +00001474
1475static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1476 DCHECK(constant.IsConstant());
1477 HConstant* cst = constant.GetConstant();
1478 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001479 // Null is mapped to a core W register, which we associate with kPrimInt.
1480 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001481 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1482 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1483 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1484}
1485
Roland Levillain558dea12017-01-27 19:40:44 +00001486// Allocate a scratch register from the VIXL pool, querying first
 1487 // the floating-point register pool, and then the core register
1488// pool. This is essentially a reimplementation of
1489// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
1490// using a different allocation strategy.
1491static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
1492 vixl::aarch64::UseScratchRegisterScope* temps,
1493 int size_in_bits) {
1494 return masm->GetScratchFPRegisterList()->IsEmpty()
1495 ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
1496 : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
1497}
1498
Calin Juravlee460d1d2015-09-29 04:52:17 +01001499void CodeGeneratorARM64::MoveLocation(Location destination,
1500 Location source,
1501 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001502 if (source.Equals(destination)) {
1503 return;
1504 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001505
1506 // A valid move can always be inferred from the destination and source
1507 // locations. When moving from and to a register, the argument type can be
1508 // used to generate 32bit instead of 64bit moves. In debug mode we also
1509 // checks the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001510 // check the coherency of the locations and the type.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001511
1512 if (destination.IsRegister() || destination.IsFpuRegister()) {
1513 if (unspecified_type) {
1514 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1515 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001516 (src_cst != nullptr && (src_cst->IsIntConstant()
1517 || src_cst->IsFloatConstant()
1518 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001519 // For stack slots and 32bit constants, a 64bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001520 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001521 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001522 // If the source is a double stack slot or a 64bit constant, a 64bit
1523 // type is appropriate. Else the source is a register, and since the
 1524 // type has not been specified, we choose a 64bit type to force a 64bit
1525 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001526 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001527 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001528 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001529 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1530 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1531 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001532 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1533 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1534 __ Ldr(dst, StackOperandFrom(source));
1535 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001536 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001537 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001538 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001539 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001540 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001541 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001542 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001543 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1544 ? Primitive::kPrimLong
1545 : Primitive::kPrimInt;
1546 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1547 }
1548 } else {
1549 DCHECK(source.IsFpuRegister());
1550 if (destination.IsRegister()) {
1551 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1552 ? Primitive::kPrimDouble
1553 : Primitive::kPrimFloat;
1554 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1555 } else {
1556 DCHECK(destination.IsFpuRegister());
1557 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001558 }
1559 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001560 } else { // The destination is not a register. It must be a stack slot.
1561 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1562 if (source.IsRegister() || source.IsFpuRegister()) {
1563 if (unspecified_type) {
1564 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001565 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001566 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001567 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001568 }
1569 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001570 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1571 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1572 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001573 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001574 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1575 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001576 UseScratchRegisterScope temps(GetVIXLAssembler());
1577 HConstant* src_cst = source.GetConstant();
1578 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001579 if (src_cst->IsZeroBitPattern()) {
Scott Wakeling79db9972017-01-19 14:08:42 +00001580 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
1581 ? Register(xzr)
1582 : Register(wzr);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001583 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001584 if (src_cst->IsIntConstant()) {
1585 temp = temps.AcquireW();
1586 } else if (src_cst->IsLongConstant()) {
1587 temp = temps.AcquireX();
1588 } else if (src_cst->IsFloatConstant()) {
1589 temp = temps.AcquireS();
1590 } else {
1591 DCHECK(src_cst->IsDoubleConstant());
1592 temp = temps.AcquireD();
1593 }
1594 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001595 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001596 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001597 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001598 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001599 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001600 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001601 // Use any scratch register (a core or a floating-point one)
1602 // from VIXL scratch register pools as a temporary.
1603 //
1604 // We used to only use the FP scratch register pool, but in some
1605 // rare cases the only register from this pool (D31) would
1606 // already be used (e.g. within a ParallelMove instruction, when
 1607 // a move is blocked by another move requiring a scratch FP
1608 // register, which would reserve D31). To prevent this issue, we
1609 // ask for a scratch register of any type (core or FP).
Roland Levillain558dea12017-01-27 19:40:44 +00001610 //
1611 // Also, we start by asking for a FP scratch register first, as the
1612 // demand of scratch core registers is higher. This is why we
1613 // use AcquireFPOrCoreCPURegisterOfSize instead of
1614 // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
1615 // allocates core scratch registers first.
1616 CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
1617 GetVIXLAssembler(),
1618 &temps,
1619 (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001620 __ Ldr(temp, StackOperandFrom(source));
1621 __ Str(temp, StackOperandFrom(destination));
1622 }
1623 }
1624}
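// Illustrative examples of the type inference above (not from the original
// code): moving a stack slot into a core register with dst_type == kPrimVoid
// infers kPrimInt and emits a 32-bit ldr, while moving a core register into a
// double stack slot infers kPrimLong and emits a 64-bit str. When the two ends
// live in different register banks, an Fmov performs the GPR <-> FPR transfer.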
1625
1626void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001627 CPURegister dst,
1628 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001629 switch (type) {
1630 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001631 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001632 break;
1633 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001634 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001635 break;
1636 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001637 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001638 break;
1639 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001640 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001641 break;
1642 case Primitive::kPrimInt:
1643 case Primitive::kPrimNot:
1644 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001645 case Primitive::kPrimFloat:
1646 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001647 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001648 __ Ldr(dst, src);
1649 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001650 case Primitive::kPrimVoid:
1651 LOG(FATAL) << "Unreachable type " << type;
1652 }
1653}
1654
Calin Juravle77520bc2015-01-12 18:45:46 +00001655void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001656 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001657 const MemOperand& src,
1658 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001659 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001660 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001661 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001662 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001663
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001664 DCHECK(!src.IsPreIndex());
1665 DCHECK(!src.IsPostIndex());
1666
1667 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001668 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Artem Serov914d7a82017-02-07 14:33:49 +00001669 {
1670 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1671 MemOperand base = MemOperand(temp_base);
1672 switch (type) {
1673 case Primitive::kPrimBoolean:
1674 {
1675 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1676 __ ldarb(Register(dst), base);
1677 if (needs_null_check) {
1678 MaybeRecordImplicitNullCheck(instruction);
1679 }
1680 }
1681 break;
1682 case Primitive::kPrimByte:
1683 {
1684 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1685 __ ldarb(Register(dst), base);
1686 if (needs_null_check) {
1687 MaybeRecordImplicitNullCheck(instruction);
1688 }
1689 }
1690 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1691 break;
1692 case Primitive::kPrimChar:
1693 {
1694 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1695 __ ldarh(Register(dst), base);
1696 if (needs_null_check) {
1697 MaybeRecordImplicitNullCheck(instruction);
1698 }
1699 }
1700 break;
1701 case Primitive::kPrimShort:
1702 {
1703 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1704 __ ldarh(Register(dst), base);
1705 if (needs_null_check) {
1706 MaybeRecordImplicitNullCheck(instruction);
1707 }
1708 }
1709 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1710 break;
1711 case Primitive::kPrimInt:
1712 case Primitive::kPrimNot:
1713 case Primitive::kPrimLong:
1714 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
1715 {
1716 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1717 __ ldar(Register(dst), base);
1718 if (needs_null_check) {
1719 MaybeRecordImplicitNullCheck(instruction);
1720 }
1721 }
1722 break;
1723 case Primitive::kPrimFloat:
1724 case Primitive::kPrimDouble: {
1725 DCHECK(dst.IsFPRegister());
1726 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001727
Artem Serov914d7a82017-02-07 14:33:49 +00001728 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1729 {
1730 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1731 __ ldar(temp, base);
1732 if (needs_null_check) {
1733 MaybeRecordImplicitNullCheck(instruction);
1734 }
1735 }
1736 __ Fmov(FPRegister(dst), temp);
1737 break;
Roland Levillain44015862016-01-22 11:47:17 +00001738 }
Artem Serov914d7a82017-02-07 14:33:49 +00001739 case Primitive::kPrimVoid:
1740 LOG(FATAL) << "Unreachable type " << type;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001741 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001742 }
1743}
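// Note on the sequences above (illustrative): LDARB/LDARH zero-extend the
// loaded value and AArch64 has no sign-extending acquire loads, which is why
// the signed types (kPrimByte, kPrimShort) need the extra Sbfx after the
// acquire load. The acquire semantics of ldar* provide the ordering required
// for Java volatile loads; the matching release stores are emitted by
// StoreRelease below.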
1744
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001745void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001746 CPURegister src,
1747 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001748 switch (type) {
1749 case Primitive::kPrimBoolean:
1750 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001751 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001752 break;
1753 case Primitive::kPrimChar:
1754 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001755 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001756 break;
1757 case Primitive::kPrimInt:
1758 case Primitive::kPrimNot:
1759 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001760 case Primitive::kPrimFloat:
1761 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001762 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001763 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001764 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001765 case Primitive::kPrimVoid:
1766 LOG(FATAL) << "Unreachable type " << type;
1767 }
1768}
1769
Artem Serov914d7a82017-02-07 14:33:49 +00001770void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
1771 Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001772 CPURegister src,
Artem Serov914d7a82017-02-07 14:33:49 +00001773 const MemOperand& dst,
1774 bool needs_null_check) {
1775 MacroAssembler* masm = GetVIXLAssembler();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001776 UseScratchRegisterScope temps(GetVIXLAssembler());
1777 Register temp_base = temps.AcquireX();
1778
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001779 DCHECK(!dst.IsPreIndex());
1780 DCHECK(!dst.IsPostIndex());
1781
1782 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001783 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001784 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001785 MemOperand base = MemOperand(temp_base);
Artem Serov914d7a82017-02-07 14:33:49 +00001786 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001787 switch (type) {
1788 case Primitive::kPrimBoolean:
1789 case Primitive::kPrimByte:
Artem Serov914d7a82017-02-07 14:33:49 +00001790 {
1791 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1792 __ stlrb(Register(src), base);
1793 if (needs_null_check) {
1794 MaybeRecordImplicitNullCheck(instruction);
1795 }
1796 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001797 break;
1798 case Primitive::kPrimChar:
1799 case Primitive::kPrimShort:
Artem Serov914d7a82017-02-07 14:33:49 +00001800 {
1801 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1802 __ stlrh(Register(src), base);
1803 if (needs_null_check) {
1804 MaybeRecordImplicitNullCheck(instruction);
1805 }
1806 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001807 break;
1808 case Primitive::kPrimInt:
1809 case Primitive::kPrimNot:
1810 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001811 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00001812 {
1813 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1814 __ stlr(Register(src), base);
1815 if (needs_null_check) {
1816 MaybeRecordImplicitNullCheck(instruction);
1817 }
1818 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001819 break;
1820 case Primitive::kPrimFloat:
1821 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00001822 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001823 Register temp_src;
1824 if (src.IsZero()) {
1825 // The zero register is used to avoid synthesizing zero constants.
1826 temp_src = Register(src);
1827 } else {
1828 DCHECK(src.IsFPRegister());
1829 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1830 __ Fmov(temp_src, FPRegister(src));
1831 }
Artem Serov914d7a82017-02-07 14:33:49 +00001832 {
1833 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1834 __ stlr(temp_src, base);
1835 if (needs_null_check) {
1836 MaybeRecordImplicitNullCheck(instruction);
1837 }
1838 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001839 break;
1840 }
1841 case Primitive::kPrimVoid:
1842 LOG(FATAL) << "Unreachable type " << type;
1843 }
1844}
1845
Calin Juravle175dc732015-08-25 15:42:32 +01001846void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1847 HInstruction* instruction,
1848 uint32_t dex_pc,
1849 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001850 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00001851
1852 __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
1853 {
1854 // Ensure the pc position is recorded immediately after the `blr` instruction.
1855 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
1856 __ blr(lr);
1857 if (EntrypointRequiresStackMap(entrypoint)) {
1858 RecordPcInfo(instruction, dex_pc, slow_path);
1859 }
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001860 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001861}
1862
Roland Levillaindec8f632016-07-22 17:10:06 +01001863void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1864 HInstruction* instruction,
1865 SlowPathCode* slow_path) {
1866 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Roland Levillaindec8f632016-07-22 17:10:06 +01001867 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1868 __ Blr(lr);
1869}
1870
Alexandre Rames67555f72014-11-18 10:55:16 +00001871void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001872 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001873 UseScratchRegisterScope temps(GetVIXLAssembler());
1874 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001875 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1876
Serban Constantinescu02164b32014-11-13 14:05:07 +00001877 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001878 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1879 __ Add(temp, class_reg, status_offset);
1880 __ Ldar(temp, HeapOperand(temp));
1881 __ Cmp(temp, mirror::Class::kStatusInitialized);
1882 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001883 __ Bind(slow_path->GetExitLabel());
1884}
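// Note (illustrative): the Ldar above performs an acquire load of the class
// status, so if the status reads as initialized, subsequent accesses to the
// class's static fields are guaranteed to observe the values written by the
// initializing thread before it published that status.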
Alexandre Rames5319def2014-10-23 10:03:10 +01001885
Roland Levillain44015862016-01-22 11:47:17 +00001886void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001887 BarrierType type = BarrierAll;
1888
1889 switch (kind) {
1890 case MemBarrierKind::kAnyAny:
1891 case MemBarrierKind::kAnyStore: {
1892 type = BarrierAll;
1893 break;
1894 }
1895 case MemBarrierKind::kLoadAny: {
1896 type = BarrierReads;
1897 break;
1898 }
1899 case MemBarrierKind::kStoreStore: {
1900 type = BarrierWrites;
1901 break;
1902 }
1903 default:
1904 LOG(FATAL) << "Unexpected memory barrier " << kind;
1905 }
1906 __ Dmb(InnerShareable, type);
1907}
1908
Serban Constantinescu02164b32014-11-13 14:05:07 +00001909void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1910 HBasicBlock* successor) {
1911 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001912 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1913 if (slow_path == nullptr) {
1914 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1915 instruction->SetSlowPath(slow_path);
1916 codegen_->AddSlowPath(slow_path);
1917 if (successor != nullptr) {
1918 DCHECK(successor->IsLoopHeader());
1919 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1920 }
1921 } else {
1922 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1923 }
1924
Serban Constantinescu02164b32014-11-13 14:05:07 +00001925 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1926 Register temp = temps.AcquireW();
1927
Andreas Gampe542451c2016-07-26 09:02:02 -07001928 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001929 if (successor == nullptr) {
1930 __ Cbnz(temp, slow_path->GetEntryLabel());
1931 __ Bind(slow_path->GetReturnLabel());
1932 } else {
1933 __ Cbz(temp, codegen_->GetLabelOf(successor));
1934 __ B(slow_path->GetEntryLabel());
1935 // slow_path will return to GetLabelOf(successor).
1936 }
1937}
1938
Alexandre Rames5319def2014-10-23 10:03:10 +01001939InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1940 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001941 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001942 assembler_(codegen->GetAssembler()),
1943 codegen_(codegen) {}
1944
1945#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001946 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001947
1948#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1949
1950enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001951 // Using a base helps identify when we hit such breakpoints.
1952 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001953#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1954 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1955#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1956};
1957
1958#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001959 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001960 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1961 } \
1962 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1963 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1964 locations->SetOut(Location::Any()); \
1965 }
1966 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1967#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1968
1969#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001970#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001971
Alexandre Rames67555f72014-11-18 10:55:16 +00001972void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001973 DCHECK_EQ(instr->InputCount(), 2U);
1974 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1975 Primitive::Type type = instr->GetResultType();
1976 switch (type) {
1977 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001978 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001979 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001980 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001981 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001982 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001983
1984 case Primitive::kPrimFloat:
1985 case Primitive::kPrimDouble:
1986 locations->SetInAt(0, Location::RequiresFpuRegister());
1987 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001988 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001989 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001990
Alexandre Rames5319def2014-10-23 10:03:10 +01001991 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001992 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001993 }
1994}
1995
Alexandre Rames09a99962015-04-15 11:47:56 +01001996void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001997 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1998
1999 bool object_field_get_with_read_barrier =
2000 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01002001 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002002 new (GetGraph()->GetArena()) LocationSummary(instruction,
2003 object_field_get_with_read_barrier ?
2004 LocationSummary::kCallOnSlowPath :
2005 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002006 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002007 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Roland Levillaind0b51832017-01-26 19:04:23 +00002008 // We need a temporary register for the read barrier marking slow
2009 // path in CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier.
2010 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko70e97462016-08-09 11:04:26 +01002011 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002012 locations->SetInAt(0, Location::RequiresRegister());
2013 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2014 locations->SetOut(Location::RequiresFpuRegister());
2015 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002016 // The output overlaps for an object field get when read barriers
2017 // are enabled: we do not want the load to overwrite the object's
2018 // location, as we need it to emit the read barrier.
2019 locations->SetOut(
2020 Location::RequiresRegister(),
2021 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01002022 }
2023}
2024
2025void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
2026 const FieldInfo& field_info) {
2027 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00002028 LocationSummary* locations = instruction->GetLocations();
2029 Location base_loc = locations->InAt(0);
2030 Location out = locations->Out();
2031 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01002032 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002033 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01002034
Roland Levillain44015862016-01-22 11:47:17 +00002035 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2036 // Object FieldGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002037 // /* HeapReference<Object> */ out = *(base + offset)
2038 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
Roland Levillaind0b51832017-01-26 19:04:23 +00002039 Register temp = WRegisterFrom(locations->GetTemp(0));
Roland Levillain44015862016-01-22 11:47:17 +00002040 // Note that potential implicit null checks are handled in this
2041 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
2042 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2043 instruction,
2044 out,
2045 base,
2046 offset,
2047 temp,
2048 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002049 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00002050 } else {
2051 // General case.
2052 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002053 // Note that a potential implicit null check is handled in this
2054 // CodeGeneratorARM64::LoadAcquire call.
2055 // NB: LoadAcquire will record the pc info if needed.
2056 codegen_->LoadAcquire(
2057 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01002058 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002059 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2060 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002061 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01002062 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01002063 }
Roland Levillain44015862016-01-22 11:47:17 +00002064 if (field_type == Primitive::kPrimNot) {
2065 // If read barriers are enabled, emit read barriers other than
2066 // Baker's using a slow path (and also unpoison the loaded
2067 // reference, if heap poisoning is enabled).
2068 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
2069 }
Roland Levillain4d027112015-07-01 15:41:14 +01002070 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002071}
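
// Illustrative sketch only (not emitted code and not ART API), assuming
// <atomic> and <cstdint>: the memory-ordering difference between the two
// non-Baker paths above. A volatile Java read needs acquire semantics, which
// LoadAcquire provides via the LDAR-family instructions; a non-volatile read
// is a plain load whose implicit null check is recorded against the load.
static inline int32_t SketchVolatileIntFieldGet(const std::atomic<int32_t>* field) {
  return field->load(std::memory_order_acquire);  // LDAR-style load.
}
static inline int32_t SketchPlainIntFieldGet(const int32_t* field) {
  return *field;  // Plain LDR; a fault here is the implicit null check.
}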
2072
2073void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
2074 LocationSummary* locations =
2075 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2076 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002077 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
2078 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
2079 } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002080 locations->SetInAt(1, Location::RequiresFpuRegister());
2081 } else {
2082 locations->SetInAt(1, Location::RequiresRegister());
2083 }
2084}
2085
2086void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002087 const FieldInfo& field_info,
2088 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002089 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2090
2091 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002092 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01002093 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01002094 Offset offset = field_info.GetFieldOffset();
2095 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002096
Roland Levillain4d027112015-07-01 15:41:14 +01002097 {
2098 // We use a block to end the scratch scope before the write barrier, thus
2099 // freeing the temporary registers so they can be used in `MarkGCCard`.
2100 UseScratchRegisterScope temps(GetVIXLAssembler());
2101
2102 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
2103 DCHECK(value.IsW());
2104 Register temp = temps.AcquireW();
2105 __ Mov(temp, value.W());
2106 GetAssembler()->PoisonHeapReference(temp.W());
2107 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01002108 }
Roland Levillain4d027112015-07-01 15:41:14 +01002109
2110 if (field_info.IsVolatile()) {
Artem Serov914d7a82017-02-07 14:33:49 +00002111 codegen_->StoreRelease(
2112 instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
Roland Levillain4d027112015-07-01 15:41:14 +01002113 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002114 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2115 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002116 codegen_->Store(field_type, source, HeapOperand(obj, offset));
2117 codegen_->MaybeRecordImplicitNullCheck(instruction);
2118 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002119 }
2120
2121 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002122 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01002123 }
2124}
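
// Illustrative sketch only (not emitted code and not ART API), assuming
// <atomic> and <cstdint>; `card_shift` and `dirty_value` are placeholders for
// the runtime's card-table constants. It shows the two pieces above: a
// release store for volatile fields, and the card mark that follows a
// reference store so the concurrent GC rescans the holder object.
static inline void SketchVolatileIntFieldSet(std::atomic<int32_t>* field, int32_t value) {
  field->store(value, std::memory_order_release);  // STLR-style store.
}
static inline void SketchMarkCard(uint8_t* card_table, uintptr_t holder_address,
                                  unsigned card_shift, uint8_t dirty_value) {
  card_table[holder_address >> card_shift] = dirty_value;  // Dirty the holder's card.
}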
2125
Alexandre Rames67555f72014-11-18 10:55:16 +00002126void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002127 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002128
2129 switch (type) {
2130 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002131 case Primitive::kPrimLong: {
2132 Register dst = OutputRegister(instr);
2133 Register lhs = InputRegisterAt(instr, 0);
2134 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01002135 if (instr->IsAdd()) {
2136 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002137 } else if (instr->IsAnd()) {
2138 __ And(dst, lhs, rhs);
2139 } else if (instr->IsOr()) {
2140 __ Orr(dst, lhs, rhs);
2141 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002142 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002143 } else if (instr->IsRor()) {
2144 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002145 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002146 __ Ror(dst, lhs, shift);
2147 } else {
2148        // Ensure the shift distance is in a register of the same size as the
2149        // result. If we are rotating a long and the shift distance originally
2150        // comes in a w register, we do not need to sxtw it before using it as
2151        // an x register, since shift distances are always masked with (reg_bits - 1).
2152 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
2153 }
Alexandre Rames67555f72014-11-18 10:55:16 +00002154 } else {
2155 DCHECK(instr->IsXor());
2156 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01002157 }
2158 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002159 }
2160 case Primitive::kPrimFloat:
2161 case Primitive::kPrimDouble: {
2162 FPRegister dst = OutputFPRegister(instr);
2163 FPRegister lhs = InputFPRegisterAt(instr, 0);
2164 FPRegister rhs = InputFPRegisterAt(instr, 1);
2165 if (instr->IsAdd()) {
2166 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002167 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002168 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002169 } else {
2170 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002171 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002172 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002173 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002174 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00002175 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002176 }
2177}
2178
Serban Constantinescu02164b32014-11-13 14:05:07 +00002179void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
2180 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2181
2182 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2183 Primitive::Type type = instr->GetResultType();
2184 switch (type) {
2185 case Primitive::kPrimInt:
2186 case Primitive::kPrimLong: {
2187 locations->SetInAt(0, Location::RequiresRegister());
2188 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2189 locations->SetOut(Location::RequiresRegister());
2190 break;
2191 }
2192 default:
2193 LOG(FATAL) << "Unexpected shift type " << type;
2194 }
2195}
2196
2197void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
2198 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2199
2200 Primitive::Type type = instr->GetType();
2201 switch (type) {
2202 case Primitive::kPrimInt:
2203 case Primitive::kPrimLong: {
2204 Register dst = OutputRegister(instr);
2205 Register lhs = InputRegisterAt(instr, 0);
2206 Operand rhs = InputOperandAt(instr, 1);
2207 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002208 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00002209 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002210 if (instr->IsShl()) {
2211 __ Lsl(dst, lhs, shift_value);
2212 } else if (instr->IsShr()) {
2213 __ Asr(dst, lhs, shift_value);
2214 } else {
2215 __ Lsr(dst, lhs, shift_value);
2216 }
2217 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002218 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002219
2220 if (instr->IsShl()) {
2221 __ Lsl(dst, lhs, rhs_reg);
2222 } else if (instr->IsShr()) {
2223 __ Asr(dst, lhs, rhs_reg);
2224 } else {
2225 __ Lsr(dst, lhs, rhs_reg);
2226 }
2227 }
2228 break;
2229 }
2230 default:
2231 LOG(FATAL) << "Unexpected shift operation type " << type;
2232 }
2233}
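
// Illustrative sketch only (not emitted code), assuming <cstdint>: the
// masking applied above. Java takes shift distances modulo the operand width,
// which is why the immediate is masked with kMaxIntShiftDistance /
// kMaxLongShiftDistance before emitting LSL/ASR/LSR.
static inline int32_t SketchJavaShlInt(int32_t value, int32_t distance) {
  // Only the low five bits of the distance matter for a 32-bit shift.
  return static_cast<int32_t>(static_cast<uint32_t>(value) << (distance & 31));
}
static inline int64_t SketchJavaUShrLong(int64_t value, int32_t distance) {
  // Only the low six bits matter for a 64-bit shift; >>> is a logical shift.
  return static_cast<int64_t>(static_cast<uint64_t>(value) >> (distance & 63));
}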
2234
Alexandre Rames5319def2014-10-23 10:03:10 +01002235void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002236 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002237}
2238
2239void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002240 HandleBinaryOp(instruction);
2241}
2242
2243void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
2244 HandleBinaryOp(instruction);
2245}
2246
2247void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
2248 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002249}
2250
Artem Serov7fc63502016-02-09 17:15:29 +00002251void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002252 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
2253 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2254 locations->SetInAt(0, Location::RequiresRegister());
2255 // There is no immediate variant of negated bitwise instructions in AArch64.
2256 locations->SetInAt(1, Location::RequiresRegister());
2257 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2258}
2259
Artem Serov7fc63502016-02-09 17:15:29 +00002260void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002261 Register dst = OutputRegister(instr);
2262 Register lhs = InputRegisterAt(instr, 0);
2263 Register rhs = InputRegisterAt(instr, 1);
2264
2265 switch (instr->GetOpKind()) {
2266 case HInstruction::kAnd:
2267 __ Bic(dst, lhs, rhs);
2268 break;
2269 case HInstruction::kOr:
2270 __ Orn(dst, lhs, rhs);
2271 break;
2272 case HInstruction::kXor:
2273 __ Eon(dst, lhs, rhs);
2274 break;
2275 default:
2276 LOG(FATAL) << "Unreachable";
2277 }
2278}
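
// Illustrative sketch only (not emitted code), assuming <cstdint>: the scalar
// semantics of the three instructions selected above. None of them has an
// immediate form, which is why both inputs were forced into registers.
static inline uint64_t SketchBic(uint64_t lhs, uint64_t rhs) { return lhs & ~rhs; }  // AND NOT
static inline uint64_t SketchOrn(uint64_t lhs, uint64_t rhs) { return lhs | ~rhs; }  // OR NOT
static inline uint64_t SketchEon(uint64_t lhs, uint64_t rhs) { return lhs ^ ~rhs; }  // XOR NOT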
2279
Anton Kirilov74234da2017-01-13 14:42:47 +00002280void LocationsBuilderARM64::VisitDataProcWithShifterOp(
2281 HDataProcWithShifterOp* instruction) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002282 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
2283 instruction->GetType() == Primitive::kPrimLong);
2284 LocationSummary* locations =
2285 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2286 if (instruction->GetInstrKind() == HInstruction::kNeg) {
2287 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
2288 } else {
2289 locations->SetInAt(0, Location::RequiresRegister());
2290 }
2291 locations->SetInAt(1, Location::RequiresRegister());
2292 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2293}
2294
Anton Kirilov74234da2017-01-13 14:42:47 +00002295void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
2296 HDataProcWithShifterOp* instruction) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002297 Primitive::Type type = instruction->GetType();
2298 HInstruction::InstructionKind kind = instruction->GetInstrKind();
2299 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2300 Register out = OutputRegister(instruction);
2301 Register left;
2302 if (kind != HInstruction::kNeg) {
2303 left = InputRegisterAt(instruction, 0);
2304 }
Anton Kirilov74234da2017-01-13 14:42:47 +00002305 // If this `HDataProcWithShifterOp` was created by merging a type conversion as the
Alexandre Rames8626b742015-11-25 16:28:08 +00002306 // shifter operand operation, the IR generating `right_reg` (input to the type
2307 // conversion) can have a different type from the current instruction's type,
2308 // so we manually indicate the type.
2309 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Alexandre Rames8626b742015-11-25 16:28:08 +00002310 Operand right_operand(0);
2311
Anton Kirilov74234da2017-01-13 14:42:47 +00002312 HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
2313 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002314 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
2315 } else {
Anton Kirilov74234da2017-01-13 14:42:47 +00002316 right_operand = Operand(right_reg,
2317 helpers::ShiftFromOpKind(op_kind),
2318 instruction->GetShiftAmount());
Alexandre Rames8626b742015-11-25 16:28:08 +00002319 }
2320
2321 // Logical binary operations do not support extension operations in the
2322 // operand. Note that VIXL would still manage if it was passed by generating
2323 // the extension as a separate instruction.
2324 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
2325 DCHECK(!right_operand.IsExtendedRegister() ||
2326 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
2327 kind != HInstruction::kNeg));
2328 switch (kind) {
2329 case HInstruction::kAdd:
2330 __ Add(out, left, right_operand);
2331 break;
2332 case HInstruction::kAnd:
2333 __ And(out, left, right_operand);
2334 break;
2335 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00002336 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00002337 __ Neg(out, right_operand);
2338 break;
2339 case HInstruction::kOr:
2340 __ Orr(out, left, right_operand);
2341 break;
2342 case HInstruction::kSub:
2343 __ Sub(out, left, right_operand);
2344 break;
2345 case HInstruction::kXor:
2346 __ Eor(out, left, right_operand);
2347 break;
2348 default:
2349 LOG(FATAL) << "Unexpected operation kind: " << kind;
2350 UNREACHABLE();
2351 }
2352}
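
// Illustrative sketch only (not emitted code), assuming <cstdint>: what the
// merged shifter operand above buys. The two-instruction IR form collapses
// into a single data-processing instruction with a shifted second operand.
static inline int64_t SketchAddWithShiftedOperand(int64_t left, int64_t right, unsigned shift) {
  // IR:      tmp = right << shift;  out = left + tmp
  // emitted: ADD out, left, right, LSL #shift
  return left + static_cast<int64_t>(static_cast<uint64_t>(right) << shift);
}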
2353
Artem Serov328429f2016-07-06 16:23:04 +01002354void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002355 LocationSummary* locations =
2356 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2357 locations->SetInAt(0, Location::RequiresRegister());
2358 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
2359 locations->SetOut(Location::RequiresRegister());
2360}
2361
Roland Levillain19c54192016-11-04 13:44:09 +00002362void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002363 __ Add(OutputRegister(instruction),
2364 InputRegisterAt(instruction, 0),
2365 Operand(InputOperandAt(instruction, 1)));
2366}
2367
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002368void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002369 LocationSummary* locations =
2370 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002371 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2372 if (instr->GetOpKind() == HInstruction::kSub &&
2373 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002374 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002375 // Don't allocate register for Mneg instruction.
2376 } else {
2377 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2378 Location::RequiresRegister());
2379 }
2380 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2381 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002382 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2383}
2384
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002385void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002386 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002387 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2388 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002389
2390 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2391 // This fixup should be carried out for all multiply-accumulate instructions:
2392 // madd, msub, smaddl, smsubl, umaddl and umsubl.
2393 if (instr->GetType() == Primitive::kPrimLong &&
2394 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2395 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002396 vixl::aarch64::Instruction* prev =
2397 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002398 if (prev->IsLoadOrStore()) {
2399 // Make sure we emit only exactly one nop.
Artem Serov914d7a82017-02-07 14:33:49 +00002400 ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002401 __ nop();
2402 }
2403 }
2404
2405 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002406 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002407 __ Madd(res, mul_left, mul_right, accumulator);
2408 } else {
2409 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002410 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002411 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002412 __ Mneg(res, mul_left, mul_right);
2413 } else {
2414 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2415 __ Msub(res, mul_left, mul_right, accumulator);
2416 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002417 }
2418}
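
// Illustrative sketch only (not emitted code), assuming <cstdint>: the scalar
// semantics of the instruction selection above. The conditional NOP is purely
// the Cortex-A53 erratum 835769 workaround and does not change the arithmetic.
static inline int64_t SketchMadd(int64_t acc, int64_t l, int64_t r) { return acc + l * r; }
static inline int64_t SketchMsub(int64_t acc, int64_t l, int64_t r) { return acc - l * r; }
static inline int64_t SketchMneg(int64_t l, int64_t r) { return -(l * r); }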
2419
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002420void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002421 bool object_array_get_with_read_barrier =
2422 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002423 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002424 new (GetGraph()->GetArena()) LocationSummary(instruction,
2425 object_array_get_with_read_barrier ?
2426 LocationSummary::kCallOnSlowPath :
2427 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002428 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002429 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01002430 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002431 locations->SetInAt(0, Location::RequiresRegister());
2432 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002433 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2434 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2435 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002436 // The output overlaps in the case of an object array get with
2437 // read barriers enabled: we do not want the move to overwrite the
2438 // array's location, as we need it to emit the read barrier.
2439 locations->SetOut(
2440 Location::RequiresRegister(),
2441 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002442 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002443}
2444
2445void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002446 Primitive::Type type = instruction->GetType();
2447 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002448 LocationSummary* locations = instruction->GetLocations();
2449 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002450 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002451 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002452 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2453 instruction->IsStringCharAt();
Alexandre Ramesd921d642015-04-16 15:07:16 +01002454 MacroAssembler* masm = GetVIXLAssembler();
2455 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002456
Roland Levillain19c54192016-11-04 13:44:09 +00002457 // The read barrier instrumentation of object ArrayGet instructions
2458 // does not support the HIntermediateAddress instruction.
2459 DCHECK(!((type == Primitive::kPrimNot) &&
2460 instruction->GetArray()->IsIntermediateAddress() &&
2461 kEmitCompilerReadBarrier));
2462
Roland Levillain44015862016-01-22 11:47:17 +00002463 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2464 // Object ArrayGet with Baker's read barrier case.
2465 Register temp = temps.AcquireW();
Roland Levillain44015862016-01-22 11:47:17 +00002466 // Note that a potential implicit null check is handled in the
2467 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2468 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2469 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002470 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002471 // General case.
2472 MemOperand source = HeapOperand(obj);
jessicahandojo05765752016-09-09 19:01:32 -07002473 Register length;
2474 if (maybe_compressed_char_at) {
2475 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2476 length = temps.AcquireW();
Artem Serov914d7a82017-02-07 14:33:49 +00002477 {
2478 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2479 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2480
2481 if (instruction->GetArray()->IsIntermediateAddress()) {
2482 DCHECK_LT(count_offset, offset);
2483 int64_t adjusted_offset =
2484 static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
2485 // Note that `adjusted_offset` is negative, so this will be a LDUR.
2486 __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
2487 } else {
2488 __ Ldr(length, HeapOperand(obj, count_offset));
2489 }
2490 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002491 }
jessicahandojo05765752016-09-09 19:01:32 -07002492 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002493 if (index.IsConstant()) {
jessicahandojo05765752016-09-09 19:01:32 -07002494 if (maybe_compressed_char_at) {
2495 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002496 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2497 "Expecting 0=compressed, 1=uncompressed");
2498 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002499 __ Ldrb(Register(OutputCPURegister(instruction)),
2500 HeapOperand(obj, offset + Int64ConstantFrom(index)));
2501 __ B(&done);
2502 __ Bind(&uncompressed_load);
2503 __ Ldrh(Register(OutputCPURegister(instruction)),
2504 HeapOperand(obj, offset + (Int64ConstantFrom(index) << 1)));
2505 __ Bind(&done);
2506 } else {
2507 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2508 source = HeapOperand(obj, offset);
2509 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002510 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002511 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002512 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain44015862016-01-22 11:47:17 +00002513 // We do not need to compute the intermediate address from the array: the
2514 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002515 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002516 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002517 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002518 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2519 }
2520 temp = obj;
2521 } else {
2522 __ Add(temp, obj, offset);
2523 }
jessicahandojo05765752016-09-09 19:01:32 -07002524 if (maybe_compressed_char_at) {
2525 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002526 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2527 "Expecting 0=compressed, 1=uncompressed");
2528 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002529 __ Ldrb(Register(OutputCPURegister(instruction)),
2530 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2531 __ B(&done);
2532 __ Bind(&uncompressed_load);
2533 __ Ldrh(Register(OutputCPURegister(instruction)),
2534 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2535 __ Bind(&done);
2536 } else {
2537 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2538 }
Roland Levillain44015862016-01-22 11:47:17 +00002539 }
jessicahandojo05765752016-09-09 19:01:32 -07002540 if (!maybe_compressed_char_at) {
Artem Serov914d7a82017-02-07 14:33:49 +00002541 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2542 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
jessicahandojo05765752016-09-09 19:01:32 -07002543 codegen_->Load(type, OutputCPURegister(instruction), source);
2544 codegen_->MaybeRecordImplicitNullCheck(instruction);
2545 }
Roland Levillain44015862016-01-22 11:47:17 +00002546
2547 if (type == Primitive::kPrimNot) {
2548 static_assert(
2549 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2550 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2551 Location obj_loc = locations->InAt(0);
2552 if (index.IsConstant()) {
2553 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2554 } else {
2555 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2556 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002557 }
Roland Levillain4d027112015-07-01 15:41:14 +01002558 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002559}
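
// Illustrative sketch only (not emitted code and not the real String layout),
// assuming <cstdint> and <cstddef>: the compressed String.charAt() selection
// above. Bit 0 of the count field is the compression flag that TBNZ tests
// (0 = compressed, 1 = uncompressed), and it picks LDRB or LDRH accordingly.
static inline uint16_t SketchStringCharAt(const void* chars, uint32_t count_field, size_t index) {
  const bool compressed = (count_field & 1u) == 0u;
  return compressed
      ? static_cast<const uint8_t*>(chars)[index]     // LDRB: 8-bit characters.
      : static_cast<const uint16_t*>(chars)[index];   // LDRH: 16-bit characters.
}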
2560
Alexandre Rames5319def2014-10-23 10:03:10 +01002561void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2562 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2563 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002564 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002565}
2566
2567void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002568 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002569 vixl::aarch64::Register out = OutputRegister(instruction);
Artem Serov914d7a82017-02-07 14:33:49 +00002570 {
2571 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2572 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2573 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
2574 codegen_->MaybeRecordImplicitNullCheck(instruction);
2575 }
jessicahandojo05765752016-09-09 19:01:32 -07002576 // Mask out compression flag from String's array length.
2577 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002578 __ Lsr(out.W(), out.W(), 1u);
jessicahandojo05765752016-09-09 19:01:32 -07002579 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002580}
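
// Illustrative sketch only (not emitted code), assuming <cstdint>: the LSR by
// one above. With string compression enabled the count field stores
// (length << 1) | compression_flag, so dropping the low bit yields the length.
static inline uint32_t SketchCompressedStringLength(uint32_t count_field) {
  return count_field >> 1;
}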
2581
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002582void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002583 Primitive::Type value_type = instruction->GetComponentType();
2584
2585 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002586 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2587 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002588 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002589 LocationSummary::kCallOnSlowPath :
2590 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002591 locations->SetInAt(0, Location::RequiresRegister());
2592 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002593 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2594 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2595 } else if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002596 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002597 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002598 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002599 }
2600}
2601
2602void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2603 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002604 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002605 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002606 bool needs_write_barrier =
2607 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002608
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002609 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002610 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002611 CPURegister source = value;
2612 Location index = locations->InAt(1);
2613 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2614 MemOperand destination = HeapOperand(array);
2615 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002616
2617 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002618 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002619 if (index.IsConstant()) {
2620 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2621 destination = HeapOperand(array, offset);
2622 } else {
2623 UseScratchRegisterScope temps(masm);
2624 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002625 if (instruction->GetArray()->IsIntermediateAddress()) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002626 // We do not need to compute the intermediate address from the array: the
2627 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002628 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002629 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002630 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002631 DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
2632 }
2633 temp = array;
2634 } else {
2635 __ Add(temp, array, offset);
2636 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002637 destination = HeapOperand(temp,
2638 XRegisterFrom(index),
2639 LSL,
2640 Primitive::ComponentSizeShift(value_type));
2641 }
Artem Serov914d7a82017-02-07 14:33:49 +00002642 {
2643 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2644 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2645 codegen_->Store(value_type, value, destination);
2646 codegen_->MaybeRecordImplicitNullCheck(instruction);
2647 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002648 } else {
Artem Serov328429f2016-07-06 16:23:04 +01002649 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01002650 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002651 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002652 {
2653 // We use a block to end the scratch scope before the write barrier, thus
2654 // freeing the temporary registers so they can be used in `MarkGCCard`.
2655 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002656 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002657 if (index.IsConstant()) {
2658 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002659 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002660 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002661 destination = HeapOperand(temp,
2662 XRegisterFrom(index),
2663 LSL,
2664 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002665 }
2666
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002667 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2668 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2669 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2670
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002671 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002672 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2673 codegen_->AddSlowPath(slow_path);
2674 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002675 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002676 __ Cbnz(Register(value), &non_zero);
2677 if (!index.IsConstant()) {
2678 __ Add(temp, array, offset);
2679 }
Artem Serov914d7a82017-02-07 14:33:49 +00002680 {
2681 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools
2682 // emitted.
2683 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2684 __ Str(wzr, destination);
2685 codegen_->MaybeRecordImplicitNullCheck(instruction);
2686 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002687 __ B(&done);
2688 __ Bind(&non_zero);
2689 }
2690
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002691 // Note that when Baker read barriers are enabled, the type
2692 // checks are performed without read barriers. This is fine,
2693 // even in the case where a class object is in the from-space
2694 // after the flip, as a comparison involving such a type would
2695 // not produce a false positive; it may of course produce a
2696 // false negative, in which case we would take the ArraySet
2697 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01002698
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002699 Register temp2 = temps.AcquireSameSizeAs(array);
2700 // /* HeapReference<Class> */ temp = array->klass_
Artem Serov914d7a82017-02-07 14:33:49 +00002701 {
2702 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2703 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2704 __ Ldr(temp, HeapOperand(array, class_offset));
2705 codegen_->MaybeRecordImplicitNullCheck(instruction);
2706 }
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002707 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01002708
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002709 // /* HeapReference<Class> */ temp = temp->component_type_
2710 __ Ldr(temp, HeapOperand(temp, component_offset));
2711 // /* HeapReference<Class> */ temp2 = value->klass_
2712 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2713 // If heap poisoning is enabled, no need to unpoison `temp`
2714 // nor `temp2`, as we are comparing two poisoned references.
2715 __ Cmp(temp, temp2);
2716 temps.Release(temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01002717
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002718 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2719 vixl::aarch64::Label do_put;
2720 __ B(eq, &do_put);
2721 // If heap poisoning is enabled, the `temp` reference has
2722 // not been unpoisoned yet; unpoison it now.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002723 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2724
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002725 // /* HeapReference<Class> */ temp = temp->super_class_
2726 __ Ldr(temp, HeapOperand(temp, super_offset));
2727 // If heap poisoning is enabled, no need to unpoison
2728 // `temp`, as we are comparing against null below.
2729 __ Cbnz(temp, slow_path->GetEntryLabel());
2730 __ Bind(&do_put);
2731 } else {
2732 __ B(ne, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002733 }
2734 }
2735
2736 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002737 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002738 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002739 __ Mov(temp2, value.W());
2740 GetAssembler()->PoisonHeapReference(temp2);
2741 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002742 }
2743
2744 if (!index.IsConstant()) {
2745 __ Add(temp, array, offset);
2746 }
Artem Serov914d7a82017-02-07 14:33:49 +00002747 {
2748 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2749 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2750 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002751
Artem Serov914d7a82017-02-07 14:33:49 +00002752 if (!may_need_runtime_call_for_type_check) {
2753 codegen_->MaybeRecordImplicitNullCheck(instruction);
2754 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002755 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002756 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002757
2758 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2759
2760 if (done.IsLinked()) {
2761 __ Bind(&done);
2762 }
2763
2764 if (slow_path != nullptr) {
2765 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002766 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002767 }
2768}
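
// Illustrative sketch only, with hypothetical SketchClass/SketchObject structs
// rather than the real ART object layout: the fast-path type check emitted
// above for `array[i] = value`. Null stores need no check, an exact
// component-type match stores directly, and an Object[] whose component type
// really is java.lang.Object (null super class) also stores directly;
// everything else takes the ArraySet slow path for the full check.
struct SketchClass {
  const SketchClass* super_class;
  const SketchClass* component_type;
};
struct SketchObject {
  const SketchClass* klass;
};
static inline bool SketchArraySetNeedsSlowPath(const SketchObject* array,
                                               const SketchObject* value,
                                               bool static_type_is_object_array) {
  if (value == nullptr) {
    return false;  // Storing null never needs a type check.
  }
  const SketchClass* component = array->klass->component_type;
  if (component == value->klass) {
    return false;  // Exact match: store on the fast path.
  }
  if (static_type_is_object_array && component->super_class == nullptr) {
    return false;  // Component type is java.lang.Object: any reference fits.
  }
  return true;     // Defer to the slow path's full assignability check.
}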
2769
Alexandre Rames67555f72014-11-18 10:55:16 +00002770void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002771 RegisterSet caller_saves = RegisterSet::Empty();
2772 InvokeRuntimeCallingConvention calling_convention;
2773 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
2774 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
2775 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00002776 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002777 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002778}
2779
2780void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002781 BoundsCheckSlowPathARM64* slow_path =
2782 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002783 codegen_->AddSlowPath(slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00002784 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2785 __ B(slow_path->GetEntryLabel(), hs);
2786}
2787
Alexandre Rames67555f72014-11-18 10:55:16 +00002788void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2789 LocationSummary* locations =
2790 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2791 locations->SetInAt(0, Location::RequiresRegister());
2792 if (check->HasUses()) {
2793 locations->SetOut(Location::SameAsFirstInput());
2794 }
2795}
2796
2797void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2798 // We assume the class is not null.
2799 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2800 check->GetLoadClass(), check, check->GetDexPc(), true);
2801 codegen_->AddSlowPath(slow_path);
2802 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2803}
2804
Roland Levillain1a653882016-03-18 18:05:57 +00002805static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2806 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2807 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2808}
2809
2810void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2811 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2812 Location rhs_loc = instruction->GetLocations()->InAt(1);
2813 if (rhs_loc.IsConstant()) {
2814 // 0.0 is the only immediate that can be encoded directly in
2815 // an FCMP instruction.
2816 //
2817 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2818 // specify that in a floating-point comparison, positive zero
2819 // and negative zero are considered equal, so we can use the
2820 // literal 0.0 for both cases here.
2821 //
2822 // Note however that some methods (Float.equal, Float.compare,
2823 // Float.compareTo, Double.equal, Double.compare,
2824 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2825 // StrictMath.min) consider 0.0 to be (strictly) greater than
2826 // -0.0. So if we ever translate calls to these methods into a
2827 // HCompare instruction, we must handle the -0.0 case with
2828 // care here.
2829 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2830 __ Fcmp(lhs_reg, 0.0);
2831 } else {
2832 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2833 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002834}
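
// Illustrative sketch only (not emitted code): the special case discussed in
// the comment above. FCMP accepts only the literal 0.0 as an immediate, and
// that is sufficient here because an FP comparison ignores the sign of zero,
// exactly like the C++ comparison below.
static inline bool SketchSignedZeroesCompareEqual() {
  return 0.0 == -0.0;  // true: +0.0 and -0.0 compare equal.
}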
2835
Serban Constantinescu02164b32014-11-13 14:05:07 +00002836void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002837 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002838 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2839 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002840 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002841 case Primitive::kPrimBoolean:
2842 case Primitive::kPrimByte:
2843 case Primitive::kPrimShort:
2844 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002845 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002846 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002847 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002848 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002849 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2850 break;
2851 }
2852 case Primitive::kPrimFloat:
2853 case Primitive::kPrimDouble: {
2854 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002855 locations->SetInAt(1,
2856 IsFloatingPointZeroConstant(compare->InputAt(1))
2857 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2858 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002859 locations->SetOut(Location::RequiresRegister());
2860 break;
2861 }
2862 default:
2863 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2864 }
2865}
2866
2867void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2868 Primitive::Type in_type = compare->InputAt(0)->GetType();
2869
2870 // 0 if: left == right
2871 // 1 if: left > right
2872 // -1 if: left < right
2873 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002874 case Primitive::kPrimBoolean:
2875 case Primitive::kPrimByte:
2876 case Primitive::kPrimShort:
2877 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002878 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002879 case Primitive::kPrimLong: {
2880 Register result = OutputRegister(compare);
2881 Register left = InputRegisterAt(compare, 0);
2882 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002883 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002884 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2885 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00002886 break;
2887 }
2888 case Primitive::kPrimFloat:
2889 case Primitive::kPrimDouble: {
2890 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002891 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002892 __ Cset(result, ne);
2893 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002894 break;
2895 }
2896 default:
2897 LOG(FATAL) << "Unimplemented compare type " << in_type;
2898 }
2899}
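
// Illustrative sketch only (not emitted code), assuming <cstdint>: the
// branch-free CSET/CNEG idiom above that materializes a compareTo-style
// result for the integral cases.
static inline int32_t SketchCompareResult(int64_t left, int64_t right) {
  int32_t result = (left != right) ? 1 : 0;  // CSET result, ne
  if (left < right) {
    result = -result;                        // CNEG result, result, lt
  }
  return result;                             // 0 if ==, +1 if >, -1 if <.
}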
2900
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002901void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002902 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002903
2904 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2905 locations->SetInAt(0, Location::RequiresFpuRegister());
2906 locations->SetInAt(1,
2907 IsFloatingPointZeroConstant(instruction->InputAt(1))
2908 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2909 : Location::RequiresFpuRegister());
2910 } else {
2911 // Integer cases.
2912 locations->SetInAt(0, Location::RequiresRegister());
2913 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2914 }
2915
David Brazdilb3e773e2016-01-26 11:28:37 +00002916 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002917 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002918 }
2919}
2920
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002921void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002922 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002923 return;
2924 }
2925
2926 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002927 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002928 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002929
Roland Levillain7f63c522015-07-13 15:54:55 +00002930 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002931 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002932 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002933 } else {
2934 // Integer cases.
2935 Register lhs = InputRegisterAt(instruction, 0);
2936 Operand rhs = InputOperandAt(instruction, 1);
2937 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002938 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002939 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002940}
2941
2942#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2943 M(Equal) \
2944 M(NotEqual) \
2945 M(LessThan) \
2946 M(LessThanOrEqual) \
2947 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002948 M(GreaterThanOrEqual) \
2949 M(Below) \
2950 M(BelowOrEqual) \
2951 M(Above) \
2952 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002953#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002954void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2955void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002956FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002957#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002958#undef FOR_EACH_CONDITION_INSTRUCTION
2959
Zheng Xuc6667102015-05-15 16:08:45 +08002960void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2961 DCHECK(instruction->IsDiv() || instruction->IsRem());
2962
2963 LocationSummary* locations = instruction->GetLocations();
2964 Location second = locations->InAt(1);
2965 DCHECK(second.IsConstant());
2966
2967 Register out = OutputRegister(instruction);
2968 Register dividend = InputRegisterAt(instruction, 0);
2969 int64_t imm = Int64FromConstant(second.GetConstant());
2970 DCHECK(imm == 1 || imm == -1);
2971
2972 if (instruction->IsRem()) {
2973 __ Mov(out, 0);
2974 } else {
2975 if (imm == 1) {
2976 __ Mov(out, dividend);
2977 } else {
2978 __ Neg(out, dividend);
2979 }
2980 }
2981}
2982
2983void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2984 DCHECK(instruction->IsDiv() || instruction->IsRem());
2985
2986 LocationSummary* locations = instruction->GetLocations();
2987 Location second = locations->InAt(1);
2988 DCHECK(second.IsConstant());
2989
2990 Register out = OutputRegister(instruction);
2991 Register dividend = InputRegisterAt(instruction, 0);
2992 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002993 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002994 int ctz_imm = CTZ(abs_imm);
2995
2996 UseScratchRegisterScope temps(GetVIXLAssembler());
2997 Register temp = temps.AcquireSameSizeAs(out);
2998
2999 if (instruction->IsDiv()) {
3000 __ Add(temp, dividend, abs_imm - 1);
3001 __ Cmp(dividend, 0);
3002 __ Csel(out, temp, dividend, lt);
3003 if (imm > 0) {
3004 __ Asr(out, out, ctz_imm);
3005 } else {
3006 __ Neg(out, Operand(out, ASR, ctz_imm));
3007 }
3008 } else {
3009 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
3010 __ Asr(temp, dividend, bits - 1);
3011 __ Lsr(temp, temp, bits - ctz_imm);
3012 __ Add(out, dividend, temp);
3013 __ And(out, out, abs_imm - 1);
3014 __ Sub(out, out, temp);
3015 }
3016}
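
// Illustrative sketch only (not emitted code), assuming <cstdint>: the
// power-of-two sequences above. Both round toward zero as Java requires,
// which is why a plain arithmetic shift is not enough for negative dividends.
static inline int64_t SketchDivByPowerOfTwo(int64_t dividend, int ctz, bool negative_divisor) {
  const int64_t bias = (int64_t{1} << ctz) - 1;
  // ADD temp, dividend, #(abs_imm - 1); CMP dividend, #0; CSEL out, temp, dividend, lt
  const int64_t biased = (dividend < 0) ? dividend + bias : dividend;
  const int64_t quotient = biased >> ctz;           // ASR out, out, #ctz
  return negative_divisor ? -quotient : quotient;   // NEG for a negative divisor.
}
static inline int64_t SketchRemByPowerOfTwo(int64_t dividend, int ctz) {
  const int64_t mask = (int64_t{1} << ctz) - 1;
  const int64_t sign_bias = (dividend < 0) ? mask : 0;  // ASR by (bits - 1), LSR by (bits - ctz)
  return ((dividend + sign_bias) & mask) - sign_bias;   // ADD, AND, SUB above.
}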
3017
3018void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3019 DCHECK(instruction->IsDiv() || instruction->IsRem());
3020
3021 LocationSummary* locations = instruction->GetLocations();
3022 Location second = locations->InAt(1);
3023 DCHECK(second.IsConstant());
3024
3025 Register out = OutputRegister(instruction);
3026 Register dividend = InputRegisterAt(instruction, 0);
3027 int64_t imm = Int64FromConstant(second.GetConstant());
3028
3029 Primitive::Type type = instruction->GetResultType();
3030 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3031
3032 int64_t magic;
3033 int shift;
3034 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
3035
3036 UseScratchRegisterScope temps(GetVIXLAssembler());
3037 Register temp = temps.AcquireSameSizeAs(out);
3038
3039 // temp = get_high(dividend * magic)
3040 __ Mov(temp, magic);
3041 if (type == Primitive::kPrimLong) {
3042 __ Smulh(temp, dividend, temp);
3043 } else {
3044 __ Smull(temp.X(), dividend, temp);
3045 __ Lsr(temp.X(), temp.X(), 32);
3046 }
3047
3048 if (imm > 0 && magic < 0) {
3049 __ Add(temp, temp, dividend);
3050 } else if (imm < 0 && magic > 0) {
3051 __ Sub(temp, temp, dividend);
3052 }
3053
3054 if (shift != 0) {
3055 __ Asr(temp, temp, shift);
3056 }
3057
3058 if (instruction->IsDiv()) {
3059 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
3060 } else {
3061 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
3062 // TODO: Strength reduction for msub.
3063 Register temp_imm = temps.AcquireSameSizeAs(out);
3064 __ Mov(temp_imm, imm);
3065 __ Msub(out, temp, temp_imm, dividend);
3066 }
3067}
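
// Illustrative sketch only (not emitted code), assuming <cstdint>: the 32-bit
// version of the multiply-by-magic division above. `magic` and `shift` are
// whatever CalculateMagicAndShiftForDivRem produced for the divisor; the
// corrections mirror the conditional ADD/SUB and the final sign-bit adjustment.
static inline int32_t SketchDivByConstant(int32_t dividend, int32_t divisor,
                                          int32_t magic, int shift) {
  // SMULL + LSR #32: take the high 32 bits of the 64-bit signed product.
  int64_t high = (static_cast<int64_t>(dividend) * magic) >> 32;
  if (divisor > 0 && magic < 0) high += dividend;           // ADD temp, temp, dividend
  if (divisor < 0 && magic > 0) high -= dividend;           // SUB temp, temp, dividend
  int32_t quotient = static_cast<int32_t>(high >> shift);   // ASR temp, temp, #shift
  return quotient + (quotient < 0 ? 1 : 0);                 // SUB out, temp, temp ASR #31
}
// The remainder path reuses the quotient: rem = dividend - quotient * divisor (MSUB).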
3068
3069void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3070 DCHECK(instruction->IsDiv() || instruction->IsRem());
3071 Primitive::Type type = instruction->GetResultType();
3072  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3073
3074 LocationSummary* locations = instruction->GetLocations();
3075 Register out = OutputRegister(instruction);
3076 Location second = locations->InAt(1);
3077
3078 if (second.IsConstant()) {
3079 int64_t imm = Int64FromConstant(second.GetConstant());
3080
3081 if (imm == 0) {
3082      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3083 } else if (imm == 1 || imm == -1) {
3084 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003085 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08003086 DivRemByPowerOfTwo(instruction);
3087 } else {
3088 DCHECK(imm <= -2 || imm >= 2);
3089 GenerateDivRemWithAnyConstant(instruction);
3090 }
3091 } else {
3092 Register dividend = InputRegisterAt(instruction, 0);
3093 Register divisor = InputRegisterAt(instruction, 1);
3094 if (instruction->IsDiv()) {
3095 __ Sdiv(out, dividend, divisor);
3096 } else {
3097 UseScratchRegisterScope temps(GetVIXLAssembler());
3098 Register temp = temps.AcquireSameSizeAs(out);
3099 __ Sdiv(temp, dividend, divisor);
3100 __ Msub(out, temp, divisor, dividend);
3101 }
3102 }
3103}
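
// Illustrative sketch only (not emitted code), assuming <cstdint>: the
// non-constant remainder path above. AArch64 has no remainder instruction,
// so SDIV produces the quotient and MSUB folds the multiply-subtract back in.
static inline int64_t SketchRemainder(int64_t dividend, int64_t divisor) {
  const int64_t quotient = dividend / divisor;  // SDIV temp, dividend, divisor
  return dividend - quotient * divisor;         // MSUB out, temp, divisor, dividend
}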
3104
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003105void LocationsBuilderARM64::VisitDiv(HDiv* div) {
3106 LocationSummary* locations =
3107 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3108 switch (div->GetResultType()) {
3109 case Primitive::kPrimInt:
3110 case Primitive::kPrimLong:
3111 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08003112 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003113 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3114 break;
3115
3116 case Primitive::kPrimFloat:
3117 case Primitive::kPrimDouble:
3118 locations->SetInAt(0, Location::RequiresFpuRegister());
3119 locations->SetInAt(1, Location::RequiresFpuRegister());
3120 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3121 break;
3122
3123 default:
3124 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3125 }
3126}
3127
3128void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
3129 Primitive::Type type = div->GetResultType();
3130 switch (type) {
3131 case Primitive::kPrimInt:
3132 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08003133 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003134 break;
3135
3136 case Primitive::kPrimFloat:
3137 case Primitive::kPrimDouble:
3138 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
3139 break;
3140
3141 default:
3142 LOG(FATAL) << "Unexpected div type " << type;
3143 }
3144}
3145
Alexandre Rames67555f72014-11-18 10:55:16 +00003146void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003147 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003148 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00003149}
3150
3151void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3152 SlowPathCodeARM64* slow_path =
3153 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
3154 codegen_->AddSlowPath(slow_path);
3155 Location value = instruction->GetLocations()->InAt(0);
3156
Alexandre Rames3e69f162014-12-10 10:36:50 +00003157 Primitive::Type type = instruction->GetType();
3158
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003159 if (!Primitive::IsIntegralType(type)) {
3160 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00003161 return;
3162 }
3163
Alexandre Rames67555f72014-11-18 10:55:16 +00003164 if (value.IsConstant()) {
3165 int64_t divisor = Int64ConstantFrom(value);
3166 if (divisor == 0) {
3167 __ B(slow_path->GetEntryLabel());
3168 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00003169      // A division by a non-zero constant is valid. We don't need to perform
3170 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00003171 }
3172 } else {
3173 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3174 }
3175}
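// In the common case of a non-constant divisor the check above reduces to a
// single compare-and-branch to the slow path (register name illustrative):
//
//   cbz w_divisor, <DivZeroCheckSlowPathARM64 entry>
//
// A constant zero divisor becomes an unconditional branch to the slow path,
// and a constant non-zero divisor generates no code at all.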
3176
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003177void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3178 LocationSummary* locations =
3179 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3180 locations->SetOut(Location::ConstantLocation(constant));
3181}
3182
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003183void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3184 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003185 // Will be generated at use site.
3186}
3187
Alexandre Rames5319def2014-10-23 10:03:10 +01003188void LocationsBuilderARM64::VisitExit(HExit* exit) {
3189 exit->SetLocations(nullptr);
3190}
3191
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003192void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003193}
3194
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003195void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3196 LocationSummary* locations =
3197 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3198 locations->SetOut(Location::ConstantLocation(constant));
3199}
3200
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003201void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003202 // Will be generated at use site.
3203}
3204
David Brazdilfc6a86a2015-06-26 10:33:45 +00003205void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003206 DCHECK(!successor->IsExitBlock());
3207 HBasicBlock* block = got->GetBlock();
3208 HInstruction* previous = got->GetPrevious();
3209 HLoopInformation* info = block->GetLoopInformation();
3210
David Brazdil46e2a392015-03-16 17:31:52 +00003211 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003212 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3213 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3214 return;
3215 }
3216 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3217 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3218 }
3219 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003220 __ B(codegen_->GetLabelOf(successor));
3221 }
3222}
3223
David Brazdilfc6a86a2015-06-26 10:33:45 +00003224void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3225 got->SetLocations(nullptr);
3226}
3227
3228void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3229 HandleGoto(got, got->GetSuccessor());
3230}
3231
3232void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3233 try_boundary->SetLocations(nullptr);
3234}
3235
3236void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3237 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3238 if (!successor->IsExitBlock()) {
3239 HandleGoto(try_boundary, successor);
3240 }
3241}
3242
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003243void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003244 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003245 vixl::aarch64::Label* true_target,
3246 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003247 // FP branching requires both targets to be explicit. If either of the targets
3248  // is nullptr (fallthrough), use and bind `fallthrough_target` instead.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003249 vixl::aarch64::Label fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003250 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003251
David Brazdil0debae72015-11-12 18:37:00 +00003252 if (true_target == nullptr && false_target == nullptr) {
3253 // Nothing to do. The code always falls through.
3254 return;
3255 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003256 // Constant condition, statically compared against "true" (integer value 1).
3257 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003258 if (true_target != nullptr) {
3259 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003260 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003261 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003262 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003263 if (false_target != nullptr) {
3264 __ B(false_target);
3265 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003266 }
David Brazdil0debae72015-11-12 18:37:00 +00003267 return;
3268 }
3269
3270 // The following code generates these patterns:
3271 // (1) true_target == nullptr && false_target != nullptr
3272 // - opposite condition true => branch to false_target
3273 // (2) true_target != nullptr && false_target == nullptr
3274 // - condition true => branch to true_target
3275 // (3) true_target != nullptr && false_target != nullptr
3276 // - condition true => branch to true_target
3277 // - branch to false_target
3278 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003279 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003280 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003281 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003282 if (true_target == nullptr) {
3283 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3284 } else {
3285 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3286 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003287 } else {
3288 // The condition instruction has not been materialized, use its inputs as
3289 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003290 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003291
David Brazdil0debae72015-11-12 18:37:00 +00003292 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00003293 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003294 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00003295 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003296 IfCondition opposite_condition = condition->GetOppositeCondition();
3297 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003298 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003299 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003300 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003301 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003302 // Integer cases.
3303 Register lhs = InputRegisterAt(condition, 0);
3304 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003305
3306 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003307 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003308 if (true_target == nullptr) {
3309 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3310 non_fallthrough_target = false_target;
3311 } else {
3312 arm64_cond = ARM64Condition(condition->GetCondition());
3313 non_fallthrough_target = true_target;
3314 }
3315
Aart Bik086d27e2016-01-20 17:02:00 -08003316 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003317 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003318 switch (arm64_cond) {
3319 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003320 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003321 break;
3322 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003323 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003324 break;
3325 case lt:
3326 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003327 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003328 break;
3329 case ge:
3330 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003331 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003332 break;
3333 default:
3334 // Without the `static_cast` the compiler throws an error for
3335 // `-Werror=sign-promo`.
3336 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3337 }
3338 } else {
3339 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003340 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003341 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003342 }
3343 }
David Brazdil0debae72015-11-12 18:37:00 +00003344
3345 // If neither branch falls through (case 3), the conditional branch to `true_target`
3346 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3347 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003348 __ B(false_target);
3349 }
David Brazdil0debae72015-11-12 18:37:00 +00003350
3351 if (fallthrough_target.IsLinked()) {
3352 __ Bind(&fallthrough_target);
3353 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003354}
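// Illustrative examples of the branch selection above for non-materialized
// integer conditions (w0/w1 and the label L are placeholders):
//
//   if (x == 0) goto L   ->   cbz  w0, L
//   if (x != 0) goto L   ->   cbnz w0, L
//   if (x <  0) goto L   ->   tbnz w0, #31, L   // test the sign bit
//   if (x >= 0) goto L   ->   tbz  w0, #31, L
//   if (x <  y) goto L   ->   cmp  w0, w1
//                             b.lt L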
3355
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003356void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
3357 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003358 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003359 locations->SetInAt(0, Location::RequiresRegister());
3360 }
3361}
3362
3363void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003364 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3365 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003366 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3367 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3368 true_target = nullptr;
3369 }
3370 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3371 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3372 false_target = nullptr;
3373 }
David Brazdil0debae72015-11-12 18:37:00 +00003374 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003375}
3376
3377void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
3378 LocationSummary* locations = new (GetGraph()->GetArena())
3379 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01003380 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00003381 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003382 locations->SetInAt(0, Location::RequiresRegister());
3383 }
3384}
3385
3386void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003387 SlowPathCodeARM64* slow_path =
3388 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003389 GenerateTestAndBranch(deoptimize,
3390 /* condition_input_index */ 0,
3391 slow_path->GetEntryLabel(),
3392 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003393}
3394
Mingyao Yang063fc772016-08-02 11:02:54 -07003395void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3396 LocationSummary* locations = new (GetGraph()->GetArena())
3397 LocationSummary(flag, LocationSummary::kNoCall);
3398 locations->SetOut(Location::RequiresRegister());
3399}
3400
3401void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3402 __ Ldr(OutputRegister(flag),
3403 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3404}
3405
David Brazdilc0b601b2016-02-08 14:20:45 +00003406static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3407 return condition->IsCondition() &&
3408 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
3409}
3410
Alexandre Rames880f1192016-06-13 16:04:50 +01003411static inline Condition GetConditionForSelect(HCondition* condition) {
3412 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003413 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3414 : ARM64Condition(cond);
3415}
3416
David Brazdil74eb1b22015-12-14 11:44:01 +00003417void LocationsBuilderARM64::VisitSelect(HSelect* select) {
3418 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01003419 if (Primitive::IsFloatingPointType(select->GetType())) {
3420 locations->SetInAt(0, Location::RequiresFpuRegister());
3421 locations->SetInAt(1, Location::RequiresFpuRegister());
3422 locations->SetOut(Location::RequiresFpuRegister());
3423 } else {
3424 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3425 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3426 bool is_true_value_constant = cst_true_value != nullptr;
3427 bool is_false_value_constant = cst_false_value != nullptr;
3428 // Ask VIXL whether we should synthesize constants in registers.
3429 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3430 Operand true_op = is_true_value_constant ?
3431 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3432 Operand false_op = is_false_value_constant ?
3433 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3434 bool true_value_in_register = false;
3435 bool false_value_in_register = false;
3436 MacroAssembler::GetCselSynthesisInformation(
3437 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3438 true_value_in_register |= !is_true_value_constant;
3439 false_value_in_register |= !is_false_value_constant;
3440
3441 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3442 : Location::ConstantLocation(cst_true_value));
3443 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3444 : Location::ConstantLocation(cst_false_value));
3445 locations->SetOut(Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00003446 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003447
David Brazdil74eb1b22015-12-14 11:44:01 +00003448 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3449 locations->SetInAt(2, Location::RequiresRegister());
3450 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003451}
3452
3453void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003454 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003455 Condition csel_cond;
3456
3457 if (IsBooleanValueOrMaterializedCondition(cond)) {
3458 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003459 // Use the condition flags set by the previous instruction.
3460 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003461 } else {
3462 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003463 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003464 }
3465 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003466 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003467 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003468 } else {
3469 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003470 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003471 }
3472
Alexandre Rames880f1192016-06-13 16:04:50 +01003473 if (Primitive::IsFloatingPointType(select->GetType())) {
3474 __ Fcsel(OutputFPRegister(select),
3475 InputFPRegisterAt(select, 1),
3476 InputFPRegisterAt(select, 0),
3477 csel_cond);
3478 } else {
3479 __ Csel(OutputRegister(select),
3480 InputOperandAt(select, 1),
3481 InputOperandAt(select, 0),
3482 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003483 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003484}
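// Example of the pattern emitted above for an integer HSelect whose condition
// is a materialized boolean (register names illustrative; note that input 0
// holds the false value and input 1 the true value):
//
//   cmp  w_cond, #0
//   csel w_out, w_true, w_false, ne
//
// Floating-point selects use fcsel instead, and when the condition instruction
// immediately precedes the select its flags are reused and the cmp is omitted.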
3485
David Srbecky0cf44932015-12-09 14:09:59 +00003486void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3487 new (GetGraph()->GetArena()) LocationSummary(info);
3488}
3489
David Srbeckyd28f4a02016-03-14 17:14:24 +00003490void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3491 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003492}
3493
3494void CodeGeneratorARM64::GenerateNop() {
3495 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003496}
3497
Alexandre Rames5319def2014-10-23 10:03:10 +01003498void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003499 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003500}
3501
3502void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003503 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003504}
3505
3506void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003507 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003508}
3509
3510void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003511 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003512}
3513
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003514// Temp is used for read barrier.
3515static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3516 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003517 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003518 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3519 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3520 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3521 return 1;
3522 }
3523 return 0;
3524}
3525
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003526// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003527// interface pointer, one for loading the current interface.
3528// The other checks have one temp for loading the object's class.
3529static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3530 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3531 return 3;
3532 }
3533 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003534}
3535
Alexandre Rames67555f72014-11-18 10:55:16 +00003536void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003537 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003538 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003539 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003540 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003541 case TypeCheckKind::kExactCheck:
3542 case TypeCheckKind::kAbstractClassCheck:
3543 case TypeCheckKind::kClassHierarchyCheck:
3544 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003545 call_kind =
3546 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01003547 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003548 break;
3549 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003550 case TypeCheckKind::kUnresolvedCheck:
3551 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003552 call_kind = LocationSummary::kCallOnSlowPath;
3553 break;
3554 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003555
Alexandre Rames67555f72014-11-18 10:55:16 +00003556 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003557 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003558 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003559 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003560 locations->SetInAt(0, Location::RequiresRegister());
3561 locations->SetInAt(1, Location::RequiresRegister());
3562 // The "out" register is used as a temporary, so it overlaps with the inputs.
3563 // Note that TypeCheckSlowPathARM64 uses this register too.
3564 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003565 // Add temps if necessary for read barriers.
3566 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003567}
3568
3569void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003570 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003571 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003572 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003573 Register obj = InputRegisterAt(instruction, 0);
3574 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003575 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003576 Register out = OutputRegister(instruction);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003577 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
3578 DCHECK_LE(num_temps, 1u);
3579 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003580 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3581 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3582 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3583 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003584
Scott Wakeling97c72b72016-06-24 16:19:36 +01003585 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003586 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003587
3588 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003589 // Avoid null check if we know `obj` is not null.
3590 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003591 __ Cbz(obj, &zero);
3592 }
3593
Roland Levillain44015862016-01-22 11:47:17 +00003594 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003595 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003596 // /* HeapReference<Class> */ out = obj->klass_
3597 GenerateReferenceLoadTwoRegisters(instruction,
3598 out_loc,
3599 obj_loc,
3600 class_offset,
3601 maybe_temp_loc,
3602 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003603 __ Cmp(out, cls);
3604 __ Cset(out, eq);
3605 if (zero.IsLinked()) {
3606 __ B(&done);
3607 }
3608 break;
3609 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003610
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003611 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003612 // /* HeapReference<Class> */ out = obj->klass_
3613 GenerateReferenceLoadTwoRegisters(instruction,
3614 out_loc,
3615 obj_loc,
3616 class_offset,
3617 maybe_temp_loc,
3618 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003619 // If the class is abstract, we eagerly fetch the super class of the
3620 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003621 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003622 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003623 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003624 GenerateReferenceLoadOneRegister(instruction,
3625 out_loc,
3626 super_offset,
3627 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003628 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003629 // If `out` is null, we use it for the result, and jump to `done`.
3630 __ Cbz(out, &done);
3631 __ Cmp(out, cls);
3632 __ B(ne, &loop);
3633 __ Mov(out, 1);
3634 if (zero.IsLinked()) {
3635 __ B(&done);
3636 }
3637 break;
3638 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003639
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003640 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003641 // /* HeapReference<Class> */ out = obj->klass_
3642 GenerateReferenceLoadTwoRegisters(instruction,
3643 out_loc,
3644 obj_loc,
3645 class_offset,
3646 maybe_temp_loc,
3647 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003648 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003649 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003650 __ Bind(&loop);
3651 __ Cmp(out, cls);
3652 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003653 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003654 GenerateReferenceLoadOneRegister(instruction,
3655 out_loc,
3656 super_offset,
3657 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003658 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003659 __ Cbnz(out, &loop);
3660 // If `out` is null, we use it for the result, and jump to `done`.
3661 __ B(&done);
3662 __ Bind(&success);
3663 __ Mov(out, 1);
3664 if (zero.IsLinked()) {
3665 __ B(&done);
3666 }
3667 break;
3668 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003669
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003670 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003671 // /* HeapReference<Class> */ out = obj->klass_
3672 GenerateReferenceLoadTwoRegisters(instruction,
3673 out_loc,
3674 obj_loc,
3675 class_offset,
3676 maybe_temp_loc,
3677 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003678 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003679 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003680 __ Cmp(out, cls);
3681 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003682 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003683 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003684 GenerateReferenceLoadOneRegister(instruction,
3685 out_loc,
3686 component_offset,
3687 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003688 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003689 // If `out` is null, we use it for the result, and jump to `done`.
3690 __ Cbz(out, &done);
3691 __ Ldrh(out, HeapOperand(out, primitive_offset));
3692 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3693 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003694 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003695 __ Mov(out, 1);
3696 __ B(&done);
3697 break;
3698 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003699
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003700 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003701 // No read barrier since the slow path will retry upon failure.
3702 // /* HeapReference<Class> */ out = obj->klass_
3703 GenerateReferenceLoadTwoRegisters(instruction,
3704 out_loc,
3705 obj_loc,
3706 class_offset,
3707 maybe_temp_loc,
3708 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003709 __ Cmp(out, cls);
3710 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003711 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3712 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003713 codegen_->AddSlowPath(slow_path);
3714 __ B(ne, slow_path->GetEntryLabel());
3715 __ Mov(out, 1);
3716 if (zero.IsLinked()) {
3717 __ B(&done);
3718 }
3719 break;
3720 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003721
Calin Juravle98893e12015-10-02 21:05:03 +01003722 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003723 case TypeCheckKind::kInterfaceCheck: {
3724 // Note that we indeed only call on slow path, but we always go
3725 // into the slow path for the unresolved and interface check
3726 // cases.
3727 //
3728 // We cannot directly call the InstanceofNonTrivial runtime
3729 // entry point without resorting to a type checking slow path
3730 // here (i.e. by calling InvokeRuntime directly), as it would
3731 // require to assign fixed registers for the inputs of this
3732 // HInstanceOf instruction (following the runtime calling
3733 // convention), which might be cluttered by the potential first
3734 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003735 //
3736 // TODO: Introduce a new runtime entry point taking the object
3737 // to test (instead of its class) as argument, and let it deal
3738 // with the read barrier issues. This will let us refactor this
3739 // case of the `switch` code as it was previously (with a direct
3740 // call to the runtime not using a type checking slow path).
3741 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003742 DCHECK(locations->OnlyCallsOnSlowPath());
3743 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3744 /* is_fatal */ false);
3745 codegen_->AddSlowPath(slow_path);
3746 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003747 if (zero.IsLinked()) {
3748 __ B(&done);
3749 }
3750 break;
3751 }
3752 }
3753
3754 if (zero.IsLinked()) {
3755 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003756 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003757 }
3758
3759 if (done.IsLinked()) {
3760 __ Bind(&done);
3761 }
3762
3763 if (slow_path != nullptr) {
3764 __ Bind(slow_path->GetExitLabel());
3765 }
3766}
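// For the common kExactCheck case the whole instanceof collapses to a class
// load and a flag-setting compare, roughly as follows (register names
// illustrative; the class load may go through the read barrier path when read
// barriers are enabled):
//
//   cbz  w_obj, zero                     // only if the null check is needed
//   ldr  w_out, [x_obj, #class_offset]   // obj->klass_
//   cmp  w_out, w_cls
//   cset w_out, eq
//
// The hierarchical checks replace the single compare with a loop walking
// super_class_ (or component_type_) using the same load helpers.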
3767
3768void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3769 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3770 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3771
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003772 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3773 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003774 case TypeCheckKind::kExactCheck:
3775 case TypeCheckKind::kAbstractClassCheck:
3776 case TypeCheckKind::kClassHierarchyCheck:
3777 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003778 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3779 LocationSummary::kCallOnSlowPath :
3780 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003781 break;
3782 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003783 case TypeCheckKind::kUnresolvedCheck:
3784 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003785 call_kind = LocationSummary::kCallOnSlowPath;
3786 break;
3787 }
3788
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003789 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3790 locations->SetInAt(0, Location::RequiresRegister());
3791 locations->SetInAt(1, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003792 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
3793 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003794}
3795
3796void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003797 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003798 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003799 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003800 Register obj = InputRegisterAt(instruction, 0);
3801 Register cls = InputRegisterAt(instruction, 1);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003802 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3803 DCHECK_GE(num_temps, 1u);
3804 DCHECK_LE(num_temps, 3u);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003805 Location temp_loc = locations->GetTemp(0);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003806 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
3807 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003808 Register temp = WRegisterFrom(temp_loc);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003809 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3810 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3811 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3812 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3813 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3814 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3815 const uint32_t object_array_data_offset =
3816 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003817
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003818 bool is_type_check_slow_path_fatal = false;
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003819  // Always false when read barriers are enabled, since we may need to go to the entrypoint even for
3820  // non-fatal cases caused by false negatives. The false negatives may come from avoiding the read
3821  // barriers below, which is done for performance and code size reasons.
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003822 if (!kEmitCompilerReadBarrier) {
3823 is_type_check_slow_path_fatal =
3824 (type_check_kind == TypeCheckKind::kExactCheck ||
3825 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3826 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3827 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3828 !instruction->CanThrowIntoCatchBlock();
3829 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003830 SlowPathCodeARM64* type_check_slow_path =
3831 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3832 is_type_check_slow_path_fatal);
3833 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003834
Scott Wakeling97c72b72016-06-24 16:19:36 +01003835 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003836 // Avoid null check if we know obj is not null.
3837 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003838 __ Cbz(obj, &done);
3839 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003840
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003841 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003842 case TypeCheckKind::kExactCheck:
3843 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003844 // /* HeapReference<Class> */ temp = obj->klass_
3845 GenerateReferenceLoadTwoRegisters(instruction,
3846 temp_loc,
3847 obj_loc,
3848 class_offset,
3849 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003850 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003851
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003852 __ Cmp(temp, cls);
3853 // Jump to slow path for throwing the exception or doing a
3854 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003855 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003856 break;
3857 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003858
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003859 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003860 // /* HeapReference<Class> */ temp = obj->klass_
3861 GenerateReferenceLoadTwoRegisters(instruction,
3862 temp_loc,
3863 obj_loc,
3864 class_offset,
3865 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003866 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003867
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003868 // If the class is abstract, we eagerly fetch the super class of the
3869 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003870 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003871 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003872 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003873 GenerateReferenceLoadOneRegister(instruction,
3874 temp_loc,
3875 super_offset,
3876 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003877 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003878
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003879 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3880 // exception.
3881 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3882 // Otherwise, compare classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003883 __ Cmp(temp, cls);
3884 __ B(ne, &loop);
3885 break;
3886 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003887
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003888 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003889 // /* HeapReference<Class> */ temp = obj->klass_
3890 GenerateReferenceLoadTwoRegisters(instruction,
3891 temp_loc,
3892 obj_loc,
3893 class_offset,
3894 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003895 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003896
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003897 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003898 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003899 __ Bind(&loop);
3900 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003901 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003902
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003903 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003904 GenerateReferenceLoadOneRegister(instruction,
3905 temp_loc,
3906 super_offset,
3907 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003908 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003909
3910 // If the class reference currently in `temp` is not null, jump
3911 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003912 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003913 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003914 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003915 break;
3916 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003917
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003918 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003919 // /* HeapReference<Class> */ temp = obj->klass_
3920 GenerateReferenceLoadTwoRegisters(instruction,
3921 temp_loc,
3922 obj_loc,
3923 class_offset,
3924 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003925 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003926
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003927 // Do an exact check.
3928 __ Cmp(temp, cls);
3929 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003930
3931 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003932 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003933 GenerateReferenceLoadOneRegister(instruction,
3934 temp_loc,
3935 component_offset,
3936 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003937 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003938
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003939 // If the component type is null, jump to the slow path to throw the exception.
3940 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3941 // Otherwise, the object is indeed an array. Further check that this component type is not a
3942 // primitive type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003943 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3944 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003945 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003946 break;
3947 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003948
Calin Juravle98893e12015-10-02 21:05:03 +01003949 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003950 // We always go into the type check slow path for the unresolved check cases.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003951 //
3952 // We cannot directly call the CheckCast runtime entry point
3953 // without resorting to a type checking slow path here (i.e. by
3954 // calling InvokeRuntime directly), as it would require to
3955 // assign fixed registers for the inputs of this HInstanceOf
3956 // instruction (following the runtime calling convention), which
3957 // might be cluttered by the potential first read barrier
3958 // emission at the beginning of this method.
3959 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003960 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003961 case TypeCheckKind::kInterfaceCheck: {
3962 // /* HeapReference<Class> */ temp = obj->klass_
3963 GenerateReferenceLoadTwoRegisters(instruction,
3964 temp_loc,
3965 obj_loc,
3966 class_offset,
3967 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003968 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003969
3970 // /* HeapReference<Class> */ temp = temp->iftable_
3971 GenerateReferenceLoadTwoRegisters(instruction,
3972 temp_loc,
3973 temp_loc,
3974 iftable_offset,
3975 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003976 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08003977 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003978 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08003979 // Loop through the iftable and check if any class matches.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003980 vixl::aarch64::Label start_loop;
3981 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003982 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003983 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
3984 GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003985 // Go to next interface.
3986 __ Add(temp, temp, 2 * kHeapReferenceSize);
3987 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003988 // Compare the classes and continue the loop if they do not match.
3989 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
3990 __ B(ne, &start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003991 break;
3992 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003993 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003994 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003995
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003996 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003997}
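// Sketch of the kInterfaceCheck loop emitted above (register names
// illustrative). The iftable is scanned in steps of two heap references,
// which is why the cursor advances by 2 * kHeapReferenceSize and the counter
// is decremented by 2:
//
//   ldr  w_len, [x_temp, #array_length_offset]
// start_loop:
//   cbz  w_len, <type check slow path>        // nothing matched: throw
//   ldr  w_if,  [x_temp, #object_array_data_offset]
//                                             // (unpoisoned if heap poisoning is on)
//   add  w_temp, w_temp, #(2 * kHeapReferenceSize)
//   sub  w_len, w_len, #2
//   cmp  w_cls, w_if
//   b.ne start_loop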
3998
Alexandre Rames5319def2014-10-23 10:03:10 +01003999void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
4000 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4001 locations->SetOut(Location::ConstantLocation(constant));
4002}
4003
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004004void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004005 // Will be generated at use site.
4006}
4007
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004008void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
4009 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4010 locations->SetOut(Location::ConstantLocation(constant));
4011}
4012
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004013void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004014 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004015}
4016
Calin Juravle175dc732015-08-25 15:42:32 +01004017void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4018 // The trampoline uses the same calling convention as dex calling conventions,
4019 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
4020 // the method_idx.
4021 HandleInvoke(invoke);
4022}
4023
4024void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4025 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
4026}
4027
Alexandre Rames5319def2014-10-23 10:03:10 +01004028void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01004029 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01004030 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01004031}
4032
Alexandre Rames67555f72014-11-18 10:55:16 +00004033void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4034 HandleInvoke(invoke);
4035}
4036
4037void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4038 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004039 LocationSummary* locations = invoke->GetLocations();
4040 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004041 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00004042 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004043 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00004044
4045 // The register ip1 is required to be used for the hidden argument in
4046 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01004047 MacroAssembler* masm = GetVIXLAssembler();
4048 UseScratchRegisterScope scratch_scope(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00004049 scratch_scope.Exclude(ip1);
4050 __ Mov(ip1, invoke->GetDexMethodIndex());
4051
Artem Serov914d7a82017-02-07 14:33:49 +00004052 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
Alexandre Rames67555f72014-11-18 10:55:16 +00004053 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07004054 __ Ldr(temp.W(), StackOperandFrom(receiver));
Artem Serov914d7a82017-02-07 14:33:49 +00004055 {
4056 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4057 // /* HeapReference<Class> */ temp = temp->klass_
4058 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
4059 codegen_->MaybeRecordImplicitNullCheck(invoke);
4060 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004061 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00004062 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004063 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07004064 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Artem Serov914d7a82017-02-07 14:33:49 +00004065 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00004066 }
Artem Serov914d7a82017-02-07 14:33:49 +00004067
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004068 // Instead of simply (possibly) unpoisoning `temp` here, we should
4069 // emit a read barrier for the previous class reference load.
4070 // However this is not required in practice, as this is an
4071 // intermediate/temporary reference and because the current
4072 // concurrent copying collector keeps the from-space memory
4073 // intact/accessible until the end of the marking phase (the
4074  // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01004075 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004076 __ Ldr(temp,
4077 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
4078 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004079 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00004080 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004081 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00004082 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07004083 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004084
4085 {
4086 // Ensure the pc position is recorded immediately after the `blr` instruction.
4087 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4088
4089 // lr();
4090 __ blr(lr);
4091 DCHECK(!codegen_->IsLeafMethod());
4092 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4093 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004094}
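// Rough shape of the interface dispatch emitted above (offsets and register
// names illustrative). ip1 carries the hidden argument that
// art_quick_imt_conflict_trampoline consumes when the IMT slot resolves to a
// conflict method:
//
//   mov ip1, #dex_method_index
//   ldr w_temp, [x_receiver, #class_offset]            // receiver->klass_
//   ldr x_temp, [x_temp, #imt_ptr_offset]              // klass_->imt_
//   ldr x_temp, [x_temp, #(imt_index * pointer_size)]  // ImTable entry
//   ldr lr,     [x_temp, #entry_point_offset]
//   blr lr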
4095
4096void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Nicolas Geoffraydb7b44a2017-02-28 17:04:50 +00004097 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
Andreas Gampe878d58c2015-01-15 23:24:00 -08004098 if (intrinsic.TryDispatch(invoke)) {
4099 return;
4100 }
4101
Alexandre Rames67555f72014-11-18 10:55:16 +00004102 HandleInvoke(invoke);
4103}
4104
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00004105void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004106 // Explicit clinit checks triggered by static invokes must have been pruned by
4107 // art::PrepareForRegisterAllocation.
4108 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004109
Nicolas Geoffraydb7b44a2017-02-28 17:04:50 +00004110 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
Andreas Gampe878d58c2015-01-15 23:24:00 -08004111 if (intrinsic.TryDispatch(invoke)) {
4112 return;
4113 }
4114
Alexandre Rames67555f72014-11-18 10:55:16 +00004115 HandleInvoke(invoke);
4116}
4117
Andreas Gampe878d58c2015-01-15 23:24:00 -08004118static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
4119 if (invoke->GetLocations()->Intrinsified()) {
4120 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
4121 intrinsic.Dispatch(invoke);
4122 return true;
4123 }
4124 return false;
4125}
4126
Vladimir Markodc151b22015-10-15 18:02:30 +01004127HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
4128 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004129 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00004130 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01004131 return desired_dispatch_info;
4132}
4133
TatWai Chongd8c052a2016-11-02 16:12:48 +08004134Location CodeGeneratorARM64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
4135 Location temp) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004136 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00004137 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4138 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004139 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4140 uint32_t offset =
4141 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00004142 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004143 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004144 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004145 }
Vladimir Marko58155012015-08-19 12:49:41 +00004146 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004147 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004148 break;
4149 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
4150 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00004151 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00004152 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004153 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
4154 // Add ADRP with its PC-relative DexCache access patch.
Nicolas Geoffray5d37c152017-01-12 13:25:19 +00004155 const DexFile& dex_file = invoke->GetDexFileForPcRelativeDexCache();
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004156 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004157 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004158 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004159 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004160 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004161 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004162 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004163 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004164 }
Vladimir Marko58155012015-08-19 12:49:41 +00004165 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00004166 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004167 Register reg = XRegisterFrom(temp);
4168 Register method_reg;
4169 if (current_method.IsRegister()) {
4170 method_reg = XRegisterFrom(current_method);
4171 } else {
4172 DCHECK(invoke->GetLocations()->Intrinsified());
4173 DCHECK(!current_method.IsValid());
4174 method_reg = reg;
4175 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
4176 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00004177
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004178 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01004179 __ Ldr(reg.X(),
4180 MemOperand(method_reg.X(),
Andreas Gampe542451c2016-07-26 09:02:02 -07004181 ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00004182 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01004183 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
4184 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00004185 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
4186 break;
4187 }
4188 }
TatWai Chongd8c052a2016-11-02 16:12:48 +08004189 return callee_method;
4190}
4191
4192void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
4193 // All registers are assumed to be correctly set up.
4194 Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
Vladimir Marko58155012015-08-19 12:49:41 +00004195
4196 switch (invoke->GetCodePtrLocation()) {
4197 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
4198 __ Bl(&frame_entry_label_);
4199 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004200 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4201 // LR = callee_method->entry_point_from_quick_compiled_code_;
4202 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00004203 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07004204 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004205 {
4206      // To ensure that the pc position is recorded immediately after the `blr` instruction,
4207 // BLR must be the last instruction emitted in this function.
4208 // Recording the pc will occur right after returning from this function.
4209 ExactAssemblyScope eas(GetVIXLAssembler(),
4210 kInstructionSize,
4211 CodeBufferCheckScope::kExactSize);
4212 // lr()
4213 __ blr(lr);
4214 }
Vladimir Marko58155012015-08-19 12:49:41 +00004215 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00004216 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004217
Andreas Gampe878d58c2015-01-15 23:24:00 -08004218 DCHECK(!IsLeafMethod());
4219}
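// Note: the BLR emitted above (in the kCallArtMethod case) is intentionally the last
// instruction of this function. Callers such as VisitInvokeStaticOrDirect below wrap the
// call in an EmissionCheckScope and invoke RecordPcInfo right after it returns, so the
// recorded pc lands immediately after the BLR.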
4220
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004221void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004222 // Use the calling convention instead of the location of the receiver, as
4223 // intrinsics may have put the receiver in a different register. In the intrinsics
4224 // slow path, the arguments have been moved to the right place, so here we are
4225 // guaranteed that the receiver is the first register of the calling convention.
4226 InvokeDexCallingConvention calling_convention;
4227 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004228 Register temp = XRegisterFrom(temp_in);
4229 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4230 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
4231 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004232 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004233
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004234 DCHECK(receiver.IsRegister());
Artem Serov914d7a82017-02-07 14:33:49 +00004235
4236 {
4237    // Ensure that between the load and MaybeRecordImplicitNullCheck there are no pools emitted.
4238 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4239 // /* HeapReference<Class> */ temp = receiver->klass_
4240 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
4241 MaybeRecordImplicitNullCheck(invoke);
4242 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004243 // Instead of simply (possibly) unpoisoning `temp` here, we should
4244 // emit a read barrier for the previous class reference load.
      // However this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004245      // intermediate/temporary reference and because the current
4246 // concurrent copying collector keeps the from-space memory
4247 // intact/accessible until the end of the marking phase (the
4248 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004249 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4250 // temp = temp->GetMethodAt(method_offset);
4251 __ Ldr(temp, MemOperand(temp, method_offset));
4252 // lr = temp->GetEntryPoint();
4253 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
Artem Serov914d7a82017-02-07 14:33:49 +00004254 {
4255      // To ensure that the pc position is recorded immediately after the `blr` instruction,
4256 // BLR should be the last instruction emitted in this function.
4257 // Recording the pc will occur right after returning from this function.
4258 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4259 // lr();
4260 __ blr(lr);
4261 }
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004262}
4263
Orion Hodsonac141392017-01-13 11:53:47 +00004264void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4265 HandleInvoke(invoke);
4266}
4267
4268void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4269 codegen_->GenerateInvokePolymorphicCall(invoke);
4270}
4271
Scott Wakeling97c72b72016-06-24 16:19:36 +01004272vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
4273 const DexFile& dex_file,
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004274 dex::StringIndex string_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004275 vixl::aarch64::Label* adrp_label) {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004276 return
4277 NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &pc_relative_string_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004278}
4279
Scott Wakeling97c72b72016-06-24 16:19:36 +01004280vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
4281 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004282 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004283 vixl::aarch64::Label* adrp_label) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08004284 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &pc_relative_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004285}
4286
Vladimir Marko1998cd02017-01-13 13:02:58 +00004287vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4288 const DexFile& dex_file,
4289 dex::TypeIndex type_index,
4290 vixl::aarch64::Label* adrp_label) {
4291 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
4292}
4293
Scott Wakeling97c72b72016-06-24 16:19:36 +01004294vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
4295 const DexFile& dex_file,
4296 uint32_t element_offset,
4297 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004298 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
4299}
4300
Scott Wakeling97c72b72016-06-24 16:19:36 +01004301vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
4302 const DexFile& dex_file,
4303 uint32_t offset_or_index,
4304 vixl::aarch64::Label* adrp_label,
4305 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004306 // Add a patch entry and return the label.
4307 patches->emplace_back(dex_file, offset_or_index);
4308 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004309 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004310 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4311 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4312 return label;
4313}
4314
Scott Wakeling97c72b72016-06-24 16:19:36 +01004315vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004316 const DexFile& dex_file, dex::StringIndex string_index) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004317 return boot_image_string_patches_.GetOrCreate(
4318 StringReference(&dex_file, string_index),
4319 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4320}
4321
Scott Wakeling97c72b72016-06-24 16:19:36 +01004322vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Andreas Gampea5b09a62016-11-17 15:21:22 -08004323 const DexFile& dex_file, dex::TypeIndex type_index) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004324 return boot_image_type_patches_.GetOrCreate(
4325 TypeReference(&dex_file, type_index),
4326 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4327}
4328
Scott Wakeling97c72b72016-06-24 16:19:36 +01004329vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4330 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004331 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
4332 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
4333 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
4334}
4335
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004336vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004337 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
4338 jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
4339 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004340 return jit_string_patches_.GetOrCreate(
4341 StringReference(&dex_file, string_index),
4342 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4343}
4344
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004345vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004346 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
4347 jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
4348 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004349 return jit_class_patches_.GetOrCreate(
4350 TypeReference(&dex_file, type_index),
4351 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4352}
4353
Vladimir Markoaad75c62016-10-03 08:46:48 +00004354void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4355 vixl::aarch64::Register reg) {
4356 DCHECK(reg.IsX());
4357 SingleEmissionCheckScope guard(GetVIXLAssembler());
4358 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004359 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004360}
4361
4362void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4363 vixl::aarch64::Register out,
4364 vixl::aarch64::Register base) {
4365 DCHECK(out.IsX());
4366 DCHECK(base.IsX());
4367 SingleEmissionCheckScope guard(GetVIXLAssembler());
4368 __ Bind(fixup_label);
4369 __ add(out, base, Operand(/* offset placeholder */ 0));
4370}
4371
4372void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4373 vixl::aarch64::Register out,
4374 vixl::aarch64::Register base) {
4375 DCHECK(base.IsX());
4376 SingleEmissionCheckScope guard(GetVIXLAssembler());
4377 __ Bind(fixup_label);
4378 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4379}
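// A typical use of the three placeholder helpers above is a linker-patched ADRP+ADD or
// ADRP+LDR pair (cf. GenerateCalleeMethodStaticOrDirectCall and VisitLoadClass), roughly:
//
//   vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, offset);
//   EmitAdrpPlaceholder(adrp_label, reg);  // adrp reg, #0  (patched to the target page)
//   vixl::aarch64::Label* ldr_label =
//       NewPcRelativeDexCacheArrayPatch(dex_file, offset, adrp_label);
//   EmitLdrOffsetPlaceholder(ldr_label, reg, reg);  // ldr reg, [reg, #0]  (patched offset)
//
// ADRP materializes the 4KiB page of the target; the paired ADD/LDR supplies the low
// 12 bits. Both immediates start out as placeholders and are fixed up by the linker using
// the label positions collected in EmitLinkerPatches.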
4380
4381template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
4382inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4383 const ArenaDeque<PcRelativePatchInfo>& infos,
4384 ArenaVector<LinkerPatch>* linker_patches) {
4385 for (const PcRelativePatchInfo& info : infos) {
4386 linker_patches->push_back(Factory(info.label.GetLocation(),
4387 &info.target_dex_file,
4388 info.pc_insn_label->GetLocation(),
4389 info.offset_or_index));
4390 }
4391}
4392
Vladimir Marko58155012015-08-19 12:49:41 +00004393void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
4394 DCHECK(linker_patches->empty());
4395 size_t size =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004396 pc_relative_dex_cache_patches_.size() +
4397 boot_image_string_patches_.size() +
4398 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004399 boot_image_type_patches_.size() +
4400 pc_relative_type_patches_.size() +
Vladimir Marko1998cd02017-01-13 13:02:58 +00004401 type_bss_entry_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004402 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004403 linker_patches->reserve(size);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004404 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004405 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00004406 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004407 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004408 info.offset_or_index));
4409 }
4410 for (const auto& entry : boot_image_string_patches_) {
4411 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004412 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4413 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004414 target_string.dex_file,
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004415 target_string.string_index.index_));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004416 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004417 if (!GetCompilerOptions().IsBootImage()) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00004418 DCHECK(pc_relative_type_patches_.empty());
Vladimir Markoaad75c62016-10-03 08:46:48 +00004419 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
4420 linker_patches);
4421 } else {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004422 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
4423 linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004424 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
4425 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004426 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004427 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
4428 linker_patches);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004429 for (const auto& entry : boot_image_type_patches_) {
4430 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004431 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4432 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004433 target_type.dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004434 target_type.type_index.index_));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004435 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004436 for (const auto& entry : boot_image_address_patches_) {
4437 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Scott Wakeling97c72b72016-06-24 16:19:36 +01004438 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4439 linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00004440 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004441 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004442}
4443
Scott Wakeling97c72b72016-06-24 16:19:36 +01004444vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004445 Uint32ToLiteralMap* map) {
4446 return map->GetOrCreate(
4447 value,
4448 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4449}
4450
Scott Wakeling97c72b72016-06-24 16:19:36 +01004451vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004452 return uint64_literals_.GetOrCreate(
4453 value,
4454 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004455}
4456
Scott Wakeling97c72b72016-06-24 16:19:36 +01004457vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00004458 MethodReference target_method,
4459 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004460 return map->GetOrCreate(
4461 target_method,
4462 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00004463}
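// The Deduplicate* helpers above key their maps on the constant value (or on the
// MethodReference), so repeated requests for the same constant reuse a single
// literal-pool entry. The literals are created with CreateLiteralDestroyedWithPool,
// i.e. their lifetime is tied to the assembler's literal pool.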
4464
Andreas Gampe878d58c2015-01-15 23:24:00 -08004465void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004466 // Explicit clinit checks triggered by static invokes must have been pruned by
4467 // art::PrepareForRegisterAllocation.
4468 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004469
Andreas Gampe878d58c2015-01-15 23:24:00 -08004470 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4471 return;
4472 }
4473
Artem Serov914d7a82017-02-07 14:33:49 +00004474 // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
4475 // are no pools emitted.
4476 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004477 LocationSummary* locations = invoke->GetLocations();
4478 codegen_->GenerateStaticOrDirectCall(
4479 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00004480 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01004481}
4482
4483void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004484 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4485 return;
4486 }
4487
Artem Serov914d7a82017-02-07 14:33:49 +00004488 // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
4489 // are no pools emitted.
4490 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004491 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004492 DCHECK(!codegen_->IsLeafMethod());
4493 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4494}
4495
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004496HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4497 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004498 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004499 case HLoadClass::LoadKind::kInvalid:
4500 LOG(FATAL) << "UNREACHABLE";
4501 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004502 case HLoadClass::LoadKind::kReferrersClass:
4503 break;
4504 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4505 DCHECK(!GetCompilerOptions().GetCompilePic());
4506 break;
4507 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
4508 DCHECK(GetCompilerOptions().GetCompilePic());
4509 break;
4510 case HLoadClass::LoadKind::kBootImageAddress:
4511 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004512 case HLoadClass::LoadKind::kBssEntry:
4513 DCHECK(!Runtime::Current()->UseJitCompilation());
4514 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004515 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004516 DCHECK(Runtime::Current()->UseJitCompilation());
4517 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004518 case HLoadClass::LoadKind::kDexCacheViaMethod:
4519 break;
4520 }
4521 return desired_class_load_kind;
4522}
4523
Alexandre Rames67555f72014-11-18 10:55:16 +00004524void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004525 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4526 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004527 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00004528 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004529 cls,
4530 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00004531 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00004532 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004533 return;
4534 }
Vladimir Marko41559982017-01-06 14:04:23 +00004535 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004536
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004537 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4538 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004539 ? LocationSummary::kCallOnSlowPath
4540 : LocationSummary::kNoCall;
4541 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004542 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004543 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004544 }
4545
Vladimir Marko41559982017-01-06 14:04:23 +00004546 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004547 locations->SetInAt(0, Location::RequiresRegister());
4548 }
4549 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004550 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
4551 if (!kUseReadBarrier || kUseBakerReadBarrier) {
4552 // Rely on the type resolution or initialization and marking to save everything we need.
4553 // Note that IP0 may be clobbered by saving/restoring the live register (only one thanks
4554 // to the custom calling convention) or by marking, so we shall use IP1.
4555 RegisterSet caller_saves = RegisterSet::Empty();
4556 InvokeRuntimeCallingConvention calling_convention;
4557 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
4558 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
4559 RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
4560 Primitive::kPrimNot).GetCode());
4561 locations->SetCustomSlowPathCallerSaves(caller_saves);
4562 } else {
4563 // For non-Baker read barrier we have a temp-clobbering call.
4564 }
4565 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004566}
4567
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004568// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4569// move.
4570void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00004571 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4572 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
4573 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01004574 return;
4575 }
Vladimir Marko41559982017-01-06 14:04:23 +00004576 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01004577
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004578 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004579 Register out = OutputRegister(cls);
Vladimir Markoea4c1262017-02-06 19:59:33 +00004580 Register bss_entry_temp;
4581 vixl::aarch64::Label* bss_entry_adrp_label = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00004582
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004583 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
4584 ? kWithoutReadBarrier
4585 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004586 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00004587 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004588 case HLoadClass::LoadKind::kReferrersClass: {
4589 DCHECK(!cls->CanCallRuntime());
4590 DCHECK(!cls->MustGenerateClinitCheck());
4591 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4592 Register current_method = InputRegisterAt(cls, 0);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004593 GenerateGcRootFieldLoad(cls,
4594 out_loc,
4595 current_method,
4596 ArtMethod::DeclaringClassOffset().Int32Value(),
Roland Levillain00468f32016-10-27 18:02:48 +01004597 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004598 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004599 break;
4600 }
4601 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004602 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004603 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4604 cls->GetTypeIndex()));
4605 break;
4606 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004607 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004608 // Add ADRP with its PC-relative type patch.
4609 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08004610 dex::TypeIndex type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004611 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004612 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004613 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004614 vixl::aarch64::Label* add_label =
4615 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004616 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004617 break;
4618 }
4619 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004620 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004621 uint32_t address = dchecked_integral_cast<uint32_t>(
4622 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
4623 DCHECK_NE(address, 0u);
4624 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004625 break;
4626 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004627 case HLoadClass::LoadKind::kBssEntry: {
4628 // Add ADRP with its PC-relative Class .bss entry patch.
4629 const DexFile& dex_file = cls->GetDexFile();
4630 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Markoea4c1262017-02-06 19:59:33 +00004631      // We can go to the slow path even with a non-zero reference, and in that case marking
4632      // can clobber IP0, so we need to use IP1, which shall be preserved.
4633 bss_entry_temp = ip1;
4634 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4635 temps.Exclude(bss_entry_temp);
4636 bss_entry_adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
4637 codegen_->EmitAdrpPlaceholder(bss_entry_adrp_label, bss_entry_temp);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004638 // Add LDR with its PC-relative Class patch.
4639 vixl::aarch64::Label* ldr_label =
Vladimir Markoea4c1262017-02-06 19:59:33 +00004640 codegen_->NewBssEntryTypePatch(dex_file, type_index, bss_entry_adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004641 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4642 GenerateGcRootFieldLoad(cls,
Vladimir Markoea4c1262017-02-06 19:59:33 +00004643 out_loc,
4644 bss_entry_temp,
4645 /* offset placeholder */ 0u,
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004646 ldr_label,
Vladimir Markoea4c1262017-02-06 19:59:33 +00004647 read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004648 generate_null_check = true;
4649 break;
4650 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004651 case HLoadClass::LoadKind::kJitTableAddress: {
4652 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
4653 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004654 cls->GetClass()));
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004655 GenerateGcRootFieldLoad(cls,
4656 out_loc,
4657 out.X(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004658 /* offset */ 0,
Roland Levillain00468f32016-10-27 18:02:48 +01004659 /* fixup_label */ nullptr,
Vladimir Markoea4c1262017-02-06 19:59:33 +00004660 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004661 break;
4662 }
Vladimir Marko41559982017-01-06 14:04:23 +00004663 case HLoadClass::LoadKind::kDexCacheViaMethod:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004664 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00004665 LOG(FATAL) << "UNREACHABLE";
4666 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004667 }
4668
Vladimir Markoea4c1262017-02-06 19:59:33 +00004669 bool do_clinit = cls->MustGenerateClinitCheck();
4670 if (generate_null_check || do_clinit) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004671 DCHECK(cls->CanCallRuntime());
4672 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
Vladimir Markoea4c1262017-02-06 19:59:33 +00004673 cls, cls, cls->GetDexPc(), do_clinit, bss_entry_temp, bss_entry_adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004674 codegen_->AddSlowPath(slow_path);
4675 if (generate_null_check) {
4676 __ Cbz(out, slow_path->GetEntryLabel());
4677 }
4678 if (cls->MustGenerateClinitCheck()) {
4679 GenerateClassInitializationCheck(slow_path, out);
4680 } else {
4681 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004682 }
4683 }
4684}
4685
David Brazdilcb1c0552015-08-04 16:22:25 +01004686static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07004687 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004688}
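// The pending exception lives in a Thread-local slot addressed off the thread register
// (tr). VisitLoadException below reads that slot, and VisitClearException clears it by
// storing wzr (null) to the same address.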
4689
Alexandre Rames67555f72014-11-18 10:55:16 +00004690void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4691 LocationSummary* locations =
4692 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4693 locations->SetOut(Location::RequiresRegister());
4694}
4695
4696void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004697 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4698}
4699
4700void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4701 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4702}
4703
4704void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4705 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004706}
4707
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004708HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4709 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004710 switch (desired_string_load_kind) {
4711 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4712 DCHECK(!GetCompilerOptions().GetCompilePic());
4713 break;
4714 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4715 DCHECK(GetCompilerOptions().GetCompilePic());
4716 break;
4717 case HLoadString::LoadKind::kBootImageAddress:
4718 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004719 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01004720 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004721 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004722 case HLoadString::LoadKind::kJitTableAddress:
4723 DCHECK(Runtime::Current()->UseJitCompilation());
4724 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004725 case HLoadString::LoadKind::kDexCacheViaMethod:
4726 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004727 }
4728 return desired_string_load_kind;
4729}
4730
Alexandre Rames67555f72014-11-18 10:55:16 +00004731void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004732 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004733 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004734 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004735 InvokeRuntimeCallingConvention calling_convention;
4736 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
4737 } else {
4738 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004739 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
4740 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00004741 // Rely on the pResolveString and marking to save everything we need.
4742 // Note that IP0 may be clobbered by saving/restoring the live register (only one thanks
4743 // to the custom calling convention) or by marking, so we shall use IP1.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004744 RegisterSet caller_saves = RegisterSet::Empty();
4745 InvokeRuntimeCallingConvention calling_convention;
4746 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
4747 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
4748 RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
4749 Primitive::kPrimNot).GetCode());
4750 locations->SetCustomSlowPathCallerSaves(caller_saves);
4751 } else {
4752 // For non-Baker read barrier we have a temp-clobbering call.
4753 }
4754 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004755 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004756}
4757
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004758// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4759// move.
4760void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00004761 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004762 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004763
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004764 switch (load->GetLoadKind()) {
4765 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004766 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4767 load->GetStringIndex()));
4768 return; // No dex cache slow path.
4769 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004770 // Add ADRP with its PC-relative String patch.
4771 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004772 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004773 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Scott Wakeling97c72b72016-06-24 16:19:36 +01004774 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004775 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004776 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004777 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004778 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004779 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004780 return; // No dex cache slow path.
4781 }
4782 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004783 uint32_t address = dchecked_integral_cast<uint32_t>(
4784 reinterpret_cast<uintptr_t>(load->GetString().Get()));
4785 DCHECK_NE(address, 0u);
4786 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004787 return; // No dex cache slow path.
4788 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004789 case HLoadString::LoadKind::kBssEntry: {
4790 // Add ADRP with its PC-relative String .bss entry patch.
4791 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004792 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004793 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004794      // We could use IP0, as marking shall not clobber IP0 if the reference is null and
4795      // that is when we need the slow path. But let's not rely on such details and use IP1.
4796 Register temp = ip1;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004797 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004798 temps.Exclude(temp);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004799 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004800 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004801 // Add LDR with its PC-relative String patch.
4802 vixl::aarch64::Label* ldr_label =
4803 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004804 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoaad75c62016-10-03 08:46:48 +00004805 GenerateGcRootFieldLoad(load,
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004806 out_loc,
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004807 temp,
Roland Levillain00468f32016-10-27 18:02:48 +01004808 /* offset placeholder */ 0u,
4809 ldr_label,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004810 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004811 SlowPathCodeARM64* slow_path =
4812 new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load, temp, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004813 codegen_->AddSlowPath(slow_path);
4814 __ Cbz(out.X(), slow_path->GetEntryLabel());
4815 __ Bind(slow_path->GetExitLabel());
4816 return;
4817 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004818 case HLoadString::LoadKind::kJitTableAddress: {
4819 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004820 load->GetStringIndex(),
4821 load->GetString()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004822 GenerateGcRootFieldLoad(load,
4823 out_loc,
4824 out.X(),
4825 /* offset */ 0,
4826 /* fixup_label */ nullptr,
4827 kCompilerReadBarrierOption);
4828 return;
4829 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004830 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004831 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004832 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004833
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004834 // TODO: Re-add the compiler code to do string dex cache lookup again.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004835 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004836 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004837 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004838 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
4839 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004840}
4841
Alexandre Rames5319def2014-10-23 10:03:10 +01004842void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4843 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4844 locations->SetOut(Location::ConstantLocation(constant));
4845}
4846
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004847void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004848 // Will be generated at use site.
4849}
4850
Alexandre Rames67555f72014-11-18 10:55:16 +00004851void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4852 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004853 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004854 InvokeRuntimeCallingConvention calling_convention;
4855 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4856}
4857
4858void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004859 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004860 instruction,
4861 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004862 if (instruction->IsEnter()) {
4863 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4864 } else {
4865 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4866 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004867}
4868
Alexandre Rames42d641b2014-10-27 14:00:51 +00004869void LocationsBuilderARM64::VisitMul(HMul* mul) {
4870 LocationSummary* locations =
4871 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4872 switch (mul->GetResultType()) {
4873 case Primitive::kPrimInt:
4874 case Primitive::kPrimLong:
4875 locations->SetInAt(0, Location::RequiresRegister());
4876 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004877 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004878 break;
4879
4880 case Primitive::kPrimFloat:
4881 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004882 locations->SetInAt(0, Location::RequiresFpuRegister());
4883 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004884 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004885 break;
4886
4887 default:
4888 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4889 }
4890}
4891
4892void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4893 switch (mul->GetResultType()) {
4894 case Primitive::kPrimInt:
4895 case Primitive::kPrimLong:
4896 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4897 break;
4898
4899 case Primitive::kPrimFloat:
4900 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004901 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004902 break;
4903
4904 default:
4905 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4906 }
4907}
4908
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004909void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4910 LocationSummary* locations =
4911 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4912 switch (neg->GetResultType()) {
4913 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004914 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004915 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004916 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004917 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004918
4919 case Primitive::kPrimFloat:
4920 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004921 locations->SetInAt(0, Location::RequiresFpuRegister());
4922 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004923 break;
4924
4925 default:
4926 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4927 }
4928}
4929
4930void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4931 switch (neg->GetResultType()) {
4932 case Primitive::kPrimInt:
4933 case Primitive::kPrimLong:
4934 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4935 break;
4936
4937 case Primitive::kPrimFloat:
4938 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004939 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004940 break;
4941
4942 default:
4943 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4944 }
4945}
4946
4947void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4948 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004949 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004950 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004951 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004952 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4953 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004954}
4955
4956void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004957  // Note: if heap poisoning is enabled, the entry point takes care
4958 // of poisoning the reference.
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00004959 QuickEntrypointEnum entrypoint =
4960 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
4961 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004962 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004963}
4964
Alexandre Rames5319def2014-10-23 10:03:10 +01004965void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4966 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004967 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01004968 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004969 if (instruction->IsStringAlloc()) {
4970 locations->AddTemp(LocationFrom(kArtMethodRegister));
4971 } else {
4972 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00004973 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004974 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4975}
4976
4977void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004978  // Note: if heap poisoning is enabled, the entry point takes care
4979 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004980 if (instruction->IsStringAlloc()) {
4981 // String is allocated through StringFactory. Call NewEmptyString entry point.
4982 Location temp = instruction->GetLocations()->GetTemp(0);
Andreas Gampe542451c2016-07-26 09:02:02 -07004983 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004984 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4985 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004986
4987 {
4988 // Ensure the pc position is recorded immediately after the `blr` instruction.
4989 ExactAssemblyScope eas(GetVIXLAssembler(),
4990 kInstructionSize,
4991 CodeBufferCheckScope::kExactSize);
4992 __ blr(lr);
4993 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4994 }
David Brazdil6de19382016-01-08 17:37:10 +00004995 } else {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004996 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00004997 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00004998 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004999}
5000
5001void LocationsBuilderARM64::VisitNot(HNot* instruction) {
5002 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00005003 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005004 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01005005}
5006
5007void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00005008 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005009 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01005010 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01005011 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01005012 break;
5013
5014 default:
5015 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5016 }
5017}
5018
David Brazdil66d126e2015-04-03 16:02:44 +01005019void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
5020 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5021 locations->SetInAt(0, Location::RequiresRegister());
5022 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5023}
5024
5025void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
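  // Booleans are represented as 0 or 1, so XOR-ing the low bit with 1 yields the logical negation.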
Scott Wakeling97c72b72016-06-24 16:19:36 +01005026 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01005027}
5028
Alexandre Rames5319def2014-10-23 10:03:10 +01005029void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005030 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5031 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01005032}
5033
Calin Juravle2ae48182016-03-16 14:05:09 +00005034void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5035 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005036 return;
5037 }
Artem Serov914d7a82017-02-07 14:33:49 +00005038 {
5039    // Ensure that no pools are emitted between the load and MaybeRecordImplicitNullCheck.
5040 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5041 Location obj = instruction->GetLocations()->InAt(0);
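    // Load from `obj` into the zero register: the value is discarded, but if `obj` is null the
    // load faults and the runtime's fault handler turns it into a NullPointerException.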
5042 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
5043 RecordPcInfo(instruction, instruction->GetDexPc());
5044 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005045}
5046
Calin Juravle2ae48182016-03-16 14:05:09 +00005047void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005048 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005049 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01005050
5051 LocationSummary* locations = instruction->GetLocations();
5052 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005053
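  // Branch to the throwing slow path when the object register holds null.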
5054 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005055}
5056
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005057void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005058 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005059}
5060
Alexandre Rames67555f72014-11-18 10:55:16 +00005061void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5062 HandleBinaryOp(instruction);
5063}
5064
5065void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5066 HandleBinaryOp(instruction);
5067}
5068
Alexandre Rames3e69f162014-12-10 10:36:50 +00005069void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5070 LOG(FATAL) << "Unreachable";
5071}
5072
5073void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
5074 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5075}
5076
Alexandre Rames5319def2014-10-23 10:03:10 +01005077void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
5078 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5079 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5080 if (location.IsStackSlot()) {
5081 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5082 } else if (location.IsDoubleStackSlot()) {
5083 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5084 }
5085 locations->SetOut(location);
5086}
5087
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005088void InstructionCodeGeneratorARM64::VisitParameterValue(
5089 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005090 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005091}
5092
5093void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5094 LocationSummary* locations =
5095 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005096 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005097}
5098
5099void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5100 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5101 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005102}
5103
5104void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
5105 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005106 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005107 locations->SetInAt(i, Location::Any());
5108 }
5109 locations->SetOut(Location::Any());
5110}
5111
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005112void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005113 LOG(FATAL) << "Unreachable";
5114}
5115
Serban Constantinescu02164b32014-11-13 14:05:07 +00005116void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005117 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005118 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005119 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
5120 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005121 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
5122
5123 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005124 case Primitive::kPrimInt:
5125 case Primitive::kPrimLong:
5126 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005127 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005128 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5129 break;
5130
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005131 case Primitive::kPrimFloat:
5132 case Primitive::kPrimDouble: {
5133 InvokeRuntimeCallingConvention calling_convention;
5134 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5135 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5136 locations->SetOut(calling_convention.GetReturnLocation(type));
5137
5138 break;
5139 }
5140
Serban Constantinescu02164b32014-11-13 14:05:07 +00005141 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005142 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005143 }
5144}
5145
5146void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
5147 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005148
Serban Constantinescu02164b32014-11-13 14:05:07 +00005149 switch (type) {
5150 case Primitive::kPrimInt:
5151 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08005152 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005153 break;
5154 }
5155
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005156 case Primitive::kPrimFloat:
5157 case Primitive::kPrimDouble: {
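      // There is no ARM64 instruction for floating-point remainder, so this falls back to the
      // fmodf/fmod runtime entry points; pass the inputs in the runtime FP argument registers.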
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005158 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
5159 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005160 if (type == Primitive::kPrimFloat) {
5161 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5162 } else {
5163 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5164 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005165 break;
5166 }
5167
Serban Constantinescu02164b32014-11-13 14:05:07 +00005168 default:
5169 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005170 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005171 }
5172}
5173
Calin Juravle27df7582015-04-17 19:12:31 +01005174void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5175 memory_barrier->SetLocations(nullptr);
5176}
5177
5178void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005179 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005180}
5181
Alexandre Rames5319def2014-10-23 10:03:10 +01005182void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
5183 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5184 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005185 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005186}
5187
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005188void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005189 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005190}
5191
5192void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5193 instruction->SetLocations(nullptr);
5194}
5195
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005196void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005197 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005198}
5199
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005200void LocationsBuilderARM64::VisitRor(HRor* ror) {
5201 HandleBinaryOp(ror);
5202}
5203
5204void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5205 HandleBinaryOp(ror);
5206}
5207
Serban Constantinescu02164b32014-11-13 14:05:07 +00005208void LocationsBuilderARM64::VisitShl(HShl* shl) {
5209 HandleShift(shl);
5210}
5211
5212void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5213 HandleShift(shl);
5214}
5215
5216void LocationsBuilderARM64::VisitShr(HShr* shr) {
5217 HandleShift(shr);
5218}
5219
5220void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5221 HandleShift(shr);
5222}
5223
Alexandre Rames5319def2014-10-23 10:03:10 +01005224void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005225 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005226}
5227
5228void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005229 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005230}
5231
Alexandre Rames67555f72014-11-18 10:55:16 +00005232void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005233 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00005234}
5235
5236void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005237 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005238}
5239
5240void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005241 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005242}
5243
Alexandre Rames67555f72014-11-18 10:55:16 +00005244void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005245 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005246}
5247
Calin Juravlee460d1d2015-09-29 04:52:17 +01005248void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
5249 HUnresolvedInstanceFieldGet* instruction) {
5250 FieldAccessCallingConventionARM64 calling_convention;
5251 codegen_->CreateUnresolvedFieldLocationSummary(
5252 instruction, instruction->GetFieldType(), calling_convention);
5253}
5254
5255void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
5256 HUnresolvedInstanceFieldGet* instruction) {
5257 FieldAccessCallingConventionARM64 calling_convention;
5258 codegen_->GenerateUnresolvedFieldAccess(instruction,
5259 instruction->GetFieldType(),
5260 instruction->GetFieldIndex(),
5261 instruction->GetDexPc(),
5262 calling_convention);
5263}
5264
5265void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
5266 HUnresolvedInstanceFieldSet* instruction) {
5267 FieldAccessCallingConventionARM64 calling_convention;
5268 codegen_->CreateUnresolvedFieldLocationSummary(
5269 instruction, instruction->GetFieldType(), calling_convention);
5270}
5271
5272void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
5273 HUnresolvedInstanceFieldSet* instruction) {
5274 FieldAccessCallingConventionARM64 calling_convention;
5275 codegen_->GenerateUnresolvedFieldAccess(instruction,
5276 instruction->GetFieldType(),
5277 instruction->GetFieldIndex(),
5278 instruction->GetDexPc(),
5279 calling_convention);
5280}
5281
5282void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
5283 HUnresolvedStaticFieldGet* instruction) {
5284 FieldAccessCallingConventionARM64 calling_convention;
5285 codegen_->CreateUnresolvedFieldLocationSummary(
5286 instruction, instruction->GetFieldType(), calling_convention);
5287}
5288
5289void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
5290 HUnresolvedStaticFieldGet* instruction) {
5291 FieldAccessCallingConventionARM64 calling_convention;
5292 codegen_->GenerateUnresolvedFieldAccess(instruction,
5293 instruction->GetFieldType(),
5294 instruction->GetFieldIndex(),
5295 instruction->GetDexPc(),
5296 calling_convention);
5297}
5298
5299void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
5300 HUnresolvedStaticFieldSet* instruction) {
5301 FieldAccessCallingConventionARM64 calling_convention;
5302 codegen_->CreateUnresolvedFieldLocationSummary(
5303 instruction, instruction->GetFieldType(), calling_convention);
5304}
5305
5306void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
5307 HUnresolvedStaticFieldSet* instruction) {
5308 FieldAccessCallingConventionARM64 calling_convention;
5309 codegen_->GenerateUnresolvedFieldAccess(instruction,
5310 instruction->GetFieldType(),
5311 instruction->GetFieldIndex(),
5312 instruction->GetDexPc(),
5313 calling_convention);
5314}
5315
Alexandre Rames5319def2014-10-23 10:03:10 +01005316void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005317 LocationSummary* locations =
5318 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005319 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Alexandre Rames5319def2014-10-23 10:03:10 +01005320}
5321
5322void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005323 HBasicBlock* block = instruction->GetBlock();
5324 if (block->GetLoopInformation() != nullptr) {
5325 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5326 // The back edge will generate the suspend check.
5327 return;
5328 }
5329 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5330 // The goto will generate the suspend check.
5331 return;
5332 }
5333 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01005334}
5335
Alexandre Rames67555f72014-11-18 10:55:16 +00005336void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
5337 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005338 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005339 InvokeRuntimeCallingConvention calling_convention;
5340 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5341}
5342
5343void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005344 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08005345 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00005346}
5347
5348void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
5349 LocationSummary* locations =
5350 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
5351 Primitive::Type input_type = conversion->GetInputType();
5352 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00005353 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00005354 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
5355 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
5356 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
5357 }
5358
Alexandre Rames542361f2015-01-29 16:57:31 +00005359 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005360 locations->SetInAt(0, Location::RequiresFpuRegister());
5361 } else {
5362 locations->SetInAt(0, Location::RequiresRegister());
5363 }
5364
Alexandre Rames542361f2015-01-29 16:57:31 +00005365 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005366 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5367 } else {
5368 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5369 }
5370}
5371
5372void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
5373 Primitive::Type result_type = conversion->GetResultType();
5374 Primitive::Type input_type = conversion->GetInputType();
5375
5376 DCHECK_NE(input_type, result_type);
5377
Alexandre Rames542361f2015-01-29 16:57:31 +00005378 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005379 int result_size = Primitive::ComponentSize(result_type);
5380 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00005381 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005382 Register output = OutputRegister(conversion);
5383 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00005384 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01005385 // 'int' values are used directly as W registers, discarding the top
5386 // bits, so we don't need to sign-extend and can just perform a move.
5387 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
5388 // top 32 bits of the target register. We theoretically could leave those
5389 // bits unchanged, but we would have to make sure that no code uses a
5390 // 32bit input value as a 64bit value assuming that the top 32 bits are
5391 // zero.
5392 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00005393 } else if (result_type == Primitive::kPrimChar ||
5394 (input_type == Primitive::kPrimChar && input_size < result_size)) {
5395 __ Ubfx(output,
5396 output.IsX() ? source.X() : source.W(),
5397 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005398 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00005399 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005400 }
Alexandre Rames542361f2015-01-29 16:57:31 +00005401 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005402 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00005403 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005404 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
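    // Fcvtzs rounds toward zero and saturates on overflow (NaN becomes 0), which matches
    // Java semantics for float/double to int/long conversions.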
5405 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00005406 } else if (Primitive::IsFloatingPointType(result_type) &&
5407 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005408 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
5409 } else {
5410 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
5411 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00005412 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00005413}
Alexandre Rames67555f72014-11-18 10:55:16 +00005414
Serban Constantinescu02164b32014-11-13 14:05:07 +00005415void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
5416 HandleShift(ushr);
5417}
5418
5419void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
5420 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00005421}
5422
5423void LocationsBuilderARM64::VisitXor(HXor* instruction) {
5424 HandleBinaryOp(instruction);
5425}
5426
5427void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
5428 HandleBinaryOp(instruction);
5429}
5430
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005431void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005432 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005433 LOG(FATAL) << "Unreachable";
5434}
5435
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005436void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005437 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005438 LOG(FATAL) << "Unreachable";
5439}
5440
Mark Mendellfe57faa2015-09-18 09:26:15 -04005441// Simple implementation of packed switch - generate cascaded compare/jumps.
5442void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5443 LocationSummary* locations =
5444 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
5445 locations->SetInAt(0, Location::RequiresRegister());
5446}
5447
5448void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5449 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08005450 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04005451 Register value_reg = InputRegisterAt(switch_instr, 0);
5452 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
5453
Zheng Xu3927c8b2015-11-18 17:46:25 +08005454  // Roughly assume a maximum average of 16 assembly instructions generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005455 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08005456  // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the graph to
5457 // make sure we don't emit it if the target may run out of range.
5458 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
5459 // ranges and emit the tables only as required.
5460  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
Mark Mendellfe57faa2015-09-18 09:26:15 -04005461
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005462 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08005463 // Current instruction id is an upper bound of the number of HIRs in the graph.
5464 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
5465 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005466 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5467 Register temp = temps.AcquireW();
5468 __ Subs(temp, value_reg, Operand(lower_bound));
5469
Zheng Xu3927c8b2015-11-18 17:46:25 +08005470 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005471 // Jump to successors[0] if value == lower_bound.
5472 __ B(eq, codegen_->GetLabelOf(successors[0]));
5473 int32_t last_index = 0;
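    // Handle two case values per iteration to halve the number of subtractions; at the top of
    // each iteration `temp` holds `value - case_value[last_index]`.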
5474 for (; num_entries - last_index > 2; last_index += 2) {
5475 __ Subs(temp, temp, Operand(2));
5476 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
5477 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
5478 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
5479 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
5480 }
5481 if (num_entries - last_index == 2) {
5482 // The last missing case_value.
5483 __ Cmp(temp, Operand(1));
5484 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08005485 }
5486
5487 // And the default for any other value.
5488 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
5489 __ B(codegen_->GetLabelOf(default_block));
5490 }
5491 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01005492 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08005493
5494 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5495
5496    // The instructions below should use at most one blocked register. Since there are two blocked
5497 // registers, we are free to block one.
5498 Register temp_w = temps.AcquireW();
5499 Register index;
5500 // Remove the bias.
5501 if (lower_bound != 0) {
5502 index = temp_w;
5503 __ Sub(index, value_reg, Operand(lower_bound));
5504 } else {
5505 index = value_reg;
5506 }
5507
5508    // Jump to the default block if the index is out of range.
5509 __ Cmp(index, Operand(num_entries));
5510 __ B(hs, codegen_->GetLabelOf(default_block));
5511
5512 // In current VIXL implementation, it won't require any blocked registers to encode the
5513 // immediate value for Adr. So we are free to use both VIXL blocked registers to reduce the
5514 // register pressure.
5515 Register table_base = temps.AcquireX();
5516 // Load jump offset from the table.
5517 __ Adr(table_base, jump_table->GetTableStartLabel());
5518 Register jump_offset = temp_w;
5519 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
5520
5521    // Jump to the target block by branching to table_base (PC-relative) + offset.
5522 Register target_address = table_base;
5523 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
5524 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04005525 }
5526}
5527
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005528void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
5529 HInstruction* instruction,
5530 Location out,
5531 uint32_t offset,
5532 Location maybe_temp,
5533 ReadBarrierOption read_barrier_option) {
Roland Levillain44015862016-01-22 11:47:17 +00005534 Primitive::Type type = Primitive::kPrimNot;
5535 Register out_reg = RegisterFrom(out, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005536 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005537 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005538 Register temp_reg = RegisterFrom(maybe_temp, type);
5539 if (kUseBakerReadBarrier) {
5540 // Load with fast path based Baker's read barrier.
5541 // /* HeapReference<Object> */ out = *(out + offset)
5542 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5543 out,
5544 out_reg,
5545 offset,
5546 temp_reg,
5547 /* needs_null_check */ false,
5548 /* use_load_acquire */ false);
5549 } else {
5550 // Load with slow path based read barrier.
5551 // Save the value of `out` into `maybe_temp` before overwriting it
5552 // in the following move operation, as we will need it for the
5553 // read barrier below.
5554 __ Mov(temp_reg, out_reg);
5555 // /* HeapReference<Object> */ out = *(out + offset)
5556 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5557 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5558 }
5559 } else {
5560 // Plain load with no read barrier.
5561 // /* HeapReference<Object> */ out = *(out + offset)
5562 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5563 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5564 }
5565}
5566
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005567void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
5568 HInstruction* instruction,
5569 Location out,
5570 Location obj,
5571 uint32_t offset,
5572 Location maybe_temp,
5573 ReadBarrierOption read_barrier_option) {
Roland Levillain44015862016-01-22 11:47:17 +00005574 Primitive::Type type = Primitive::kPrimNot;
5575 Register out_reg = RegisterFrom(out, type);
5576 Register obj_reg = RegisterFrom(obj, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005577 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005578 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005579 if (kUseBakerReadBarrier) {
5580 // Load with fast path based Baker's read barrier.
5581 Register temp_reg = RegisterFrom(maybe_temp, type);
5582 // /* HeapReference<Object> */ out = *(obj + offset)
5583 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5584 out,
5585 obj_reg,
5586 offset,
5587 temp_reg,
5588 /* needs_null_check */ false,
5589 /* use_load_acquire */ false);
5590 } else {
5591 // Load with slow path based read barrier.
5592 // /* HeapReference<Object> */ out = *(obj + offset)
5593 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5594 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5595 }
5596 } else {
5597 // Plain load with no read barrier.
5598 // /* HeapReference<Object> */ out = *(obj + offset)
5599 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5600 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5601 }
5602}
5603
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005604void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(
5605 HInstruction* instruction,
5606 Location root,
5607 Register obj,
5608 uint32_t offset,
5609 vixl::aarch64::Label* fixup_label,
5610 ReadBarrierOption read_barrier_option) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005611 DCHECK(fixup_label == nullptr || offset == 0u);
Roland Levillain44015862016-01-22 11:47:17 +00005612 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005613 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005614 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005615 if (kUseBakerReadBarrier) {
5616 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillain35345a52017-02-27 14:32:08 +00005617      // Baker's read barriers are used:
Roland Levillain44015862016-01-22 11:47:17 +00005618 //
Roland Levillain35345a52017-02-27 14:32:08 +00005619 // root = obj.field;
Mathieu Chartierfe814e82016-11-09 14:32:49 -08005620 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
Roland Levillain35345a52017-02-27 14:32:08 +00005621 // if (temp != null) {
5622 // root = temp(root)
Roland Levillain44015862016-01-22 11:47:17 +00005623 // }
5624
5625 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005626 if (fixup_label == nullptr) {
5627 __ Ldr(root_reg, MemOperand(obj, offset));
5628 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005629 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005630 }
Roland Levillain44015862016-01-22 11:47:17 +00005631 static_assert(
5632 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5633 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5634 "have different sizes.");
5635 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5636 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5637 "have different sizes.");
5638
Roland Levillain35345a52017-02-27 14:32:08 +00005639 Register temp = lr;
5640
5641      // Slow path marking the GC root `root`. The entrypoint will already be loaded in `temp`.
5642 SlowPathCodeARM64* slow_path =
5643 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction,
5644 root,
5645 LocationFrom(temp));
5646 codegen_->AddSlowPath(slow_path);
5647 const int32_t entry_point_offset =
5648 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(root.reg());
5649 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
5650 // Loading the entrypoint does not require a load acquire since it is only changed when
5651 // threads are suspended or running a checkpoint.
5652 __ Ldr(temp, MemOperand(tr, entry_point_offset));
Mathieu Chartierfe814e82016-11-09 14:32:49 -08005653      // The entrypoint is null when the GC is not marking; this prevents one load compared to
5654 // checking GetIsGcMarking.
Roland Levillain44015862016-01-22 11:47:17 +00005655 __ Cbnz(temp, slow_path->GetEntryLabel());
5656 __ Bind(slow_path->GetExitLabel());
5657 } else {
5658 // GC root loaded through a slow path for read barriers other
5659 // than Baker's.
5660 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005661 if (fixup_label == nullptr) {
5662 __ Add(root_reg.X(), obj.X(), offset);
5663 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005664 codegen_->EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005665 }
Roland Levillain44015862016-01-22 11:47:17 +00005666 // /* mirror::Object* */ root = root->Read()
5667 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5668 }
5669 } else {
5670 // Plain GC root load with no read barrier.
5671 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005672 if (fixup_label == nullptr) {
5673 __ Ldr(root_reg, MemOperand(obj, offset));
5674 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005675 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005676 }
Roland Levillain44015862016-01-22 11:47:17 +00005677 // Note that GC roots are not affected by heap poisoning, thus we
5678 // do not have to unpoison `root_reg` here.
5679 }
5680}
5681
5682void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5683 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005684 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005685 uint32_t offset,
5686 Register temp,
5687 bool needs_null_check,
5688 bool use_load_acquire) {
5689 DCHECK(kEmitCompilerReadBarrier);
5690 DCHECK(kUseBakerReadBarrier);
5691
5692 // /* HeapReference<Object> */ ref = *(obj + offset)
5693 Location no_index = Location::NoLocation();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005694 size_t no_scale_factor = 0u;
Roland Levillainbfea3352016-06-23 13:48:47 +01005695 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5696 ref,
5697 obj,
5698 offset,
5699 no_index,
5700 no_scale_factor,
5701 temp,
5702 needs_null_check,
5703 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005704}
5705
5706void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5707 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005708 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005709 uint32_t data_offset,
5710 Location index,
5711 Register temp,
5712 bool needs_null_check) {
5713 DCHECK(kEmitCompilerReadBarrier);
5714 DCHECK(kUseBakerReadBarrier);
5715
5716 // Array cells are never volatile variables, therefore array loads
5717 // never use Load-Acquire instructions on ARM64.
5718 const bool use_load_acquire = false;
5719
Roland Levillainbfea3352016-06-23 13:48:47 +01005720 static_assert(
5721 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5722 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005723 // /* HeapReference<Object> */ ref =
5724 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005725 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
5726 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5727 ref,
5728 obj,
5729 data_offset,
5730 index,
5731 scale_factor,
5732 temp,
5733 needs_null_check,
5734 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005735}
5736
5737void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5738 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005739 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005740 uint32_t offset,
5741 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01005742 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005743 Register temp,
5744 bool needs_null_check,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005745 bool use_load_acquire,
5746 bool always_update_field) {
Roland Levillain44015862016-01-22 11:47:17 +00005747 DCHECK(kEmitCompilerReadBarrier);
5748 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01005749 // If we are emitting an array load, we should not be using a
5750 // Load Acquire instruction. In other words:
5751 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5752 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005753
Roland Levillain35345a52017-02-27 14:32:08 +00005754 MacroAssembler* masm = GetVIXLAssembler();
5755 UseScratchRegisterScope temps(masm);
5756
5757 // In slow path based read barriers, the read barrier call is
5758 // inserted after the original load. However, in fast path based
5759 // Baker's read barriers, we need to perform the load of
5760 // mirror::Object::monitor_ *before* the original reference load.
5761 // This load-load ordering is required by the read barrier.
5762 // The fast path/slow path (for Baker's algorithm) should look like:
Roland Levillain44015862016-01-22 11:47:17 +00005763 //
Roland Levillain35345a52017-02-27 14:32:08 +00005764  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
5765 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5766 // HeapReference<Object> ref = *src; // Original reference load.
5767 // bool is_gray = (rb_state == ReadBarrier::GrayState());
5768 // if (is_gray) {
5769 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005770 // }
Roland Levillain35345a52017-02-27 14:32:08 +00005771 //
5772 // Note: the original implementation in ReadBarrier::Barrier is
5773 // slightly more complex as it performs additional checks that we do
5774 // not do here for performance reasons.
Roland Levillain44015862016-01-22 11:47:17 +00005775
5776 Primitive::Type type = Primitive::kPrimNot;
5777 Register ref_reg = RegisterFrom(ref, type);
Roland Levillain35345a52017-02-27 14:32:08 +00005778 DCHECK(obj.IsW());
5779 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
Roland Levillain44015862016-01-22 11:47:17 +00005780
Roland Levillain35345a52017-02-27 14:32:08 +00005781 {
5782    // Ensure that no pools are emitted between the load and MaybeRecordImplicitNullCheck.
5783 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5784 // /* int32_t */ monitor = obj->monitor_
5785 __ Ldr(temp, HeapOperand(obj, monitor_offset));
5786 if (needs_null_check) {
5787 MaybeRecordImplicitNullCheck(instruction);
5788 }
5789 }
5790 // /* LockWord */ lock_word = LockWord(monitor)
5791 static_assert(sizeof(LockWord) == sizeof(int32_t),
5792 "art::LockWord and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005793
Roland Levillain35345a52017-02-27 14:32:08 +00005794 // Introduce a dependency on the lock_word including rb_state,
5795 // to prevent load-load reordering, and without using
5796 // a memory barrier (which would be more expensive).
5797 // `obj` is unchanged by this operation, but its value now depends
5798 // on `temp`.
5799 __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));
5800
5801 // The actual reference load.
Roland Levillain44015862016-01-22 11:47:17 +00005802 if (index.IsValid()) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005803 // Load types involving an "index": ArrayGet,
5804 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
5805 // intrinsics.
Roland Levillainbfea3352016-06-23 13:48:47 +01005806 if (use_load_acquire) {
5807 // UnsafeGetObjectVolatile intrinsic case.
5808 // Register `index` is not an index in an object array, but an
5809 // offset to an object reference field within object `obj`.
5810 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
5811 DCHECK(instruction->GetLocations()->Intrinsified());
5812 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
5813 << instruction->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005814 DCHECK_EQ(offset, 0u);
5815 DCHECK_EQ(scale_factor, 0u);
Roland Levillain35345a52017-02-27 14:32:08 +00005816      DCHECK(!needs_null_check);
5817 // /* HeapReference<Object> */ ref = *(obj + index)
Roland Levillainbfea3352016-06-23 13:48:47 +01005818 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
5819 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005820 } else {
Roland Levillain35345a52017-02-27 14:32:08 +00005821 // ArrayGet and UnsafeGetObject intrinsics cases.
5822 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
Roland Levillainbfea3352016-06-23 13:48:47 +01005823 if (index.IsConstant()) {
5824 uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
5825 Load(type, ref_reg, HeapOperand(obj, computed_offset));
5826 } else {
Roland Levillain35345a52017-02-27 14:32:08 +00005827 Register temp3 = temps.AcquireW();
5828 __ Add(temp3, obj, offset);
5829 Load(type, ref_reg, HeapOperand(temp3, XRegisterFrom(index), LSL, scale_factor));
5830 temps.Release(temp3);
Roland Levillainbfea3352016-06-23 13:48:47 +01005831 }
Roland Levillain44015862016-01-22 11:47:17 +00005832 }
Roland Levillain44015862016-01-22 11:47:17 +00005833 } else {
Roland Levillain35345a52017-02-27 14:32:08 +00005834 // /* HeapReference<Object> */ ref = *(obj + offset)
Roland Levillain44015862016-01-22 11:47:17 +00005835 MemOperand field = HeapOperand(obj, offset);
5836 if (use_load_acquire) {
Roland Levillain35345a52017-02-27 14:32:08 +00005837 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005838 } else {
5839 Load(type, ref_reg, field);
5840 }
5841 }
5842
5843 // Object* ref = ref_addr->AsMirrorPtr()
5844 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
Roland Levillain35345a52017-02-27 14:32:08 +00005845
5846 // Slow path marking the object `ref` when it is gray.
5847 SlowPathCodeARM64* slow_path;
5848 if (always_update_field) {
5849 // ReadBarrierMarkAndUpdateFieldSlowPathARM64 only supports
5850 // address of the form `obj + field_offset`, where `obj` is a
5851 // register and `field_offset` is a register. Thus `offset` and
5852    // `scale_factor` above are expected to be zero in this code path.
5853 DCHECK_EQ(offset, 0u);
5854 DCHECK_EQ(scale_factor, 0u); /* "times 1" */
5855 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathARM64(
5856 instruction, ref, obj, /* field_offset */ index, temp);
5857 } else {
5858 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
5859 }
5860 AddSlowPath(slow_path);
5861
5862 // if (rb_state == ReadBarrier::GrayState())
5863 // ref = ReadBarrier::Mark(ref);
5864 // Given the numeric representation, it's enough to check the low bit of the rb_state.
5865 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
5866 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
5867 __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
5868 __ Bind(slow_path->GetExitLabel());
Roland Levillain44015862016-01-22 11:47:17 +00005869}
5870
5871void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
5872 Location out,
5873 Location ref,
5874 Location obj,
5875 uint32_t offset,
5876 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005877 DCHECK(kEmitCompilerReadBarrier);
5878
Roland Levillain44015862016-01-22 11:47:17 +00005879 // Insert a slow path based read barrier *after* the reference load.
5880 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005881 // If heap poisoning is enabled, the unpoisoning of the loaded
5882 // reference will be carried out by the runtime within the slow
5883 // path.
5884 //
5885 // Note that `ref` currently does not get unpoisoned (when heap
5886 // poisoning is enabled), which is alright as the `ref` argument is
5887 // not used by the artReadBarrierSlow entry point.
5888 //
5889 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5890 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
5891 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
5892 AddSlowPath(slow_path);
5893
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005894 __ B(slow_path->GetEntryLabel());
5895 __ Bind(slow_path->GetExitLabel());
5896}
5897
Roland Levillain44015862016-01-22 11:47:17 +00005898void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5899 Location out,
5900 Location ref,
5901 Location obj,
5902 uint32_t offset,
5903 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005904 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005905 // Baker's read barriers shall be handled by the fast path
5906 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
5907 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005908 // If heap poisoning is enabled, unpoisoning will be taken care of
5909 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005910 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005911 } else if (kPoisonHeapReferences) {
5912 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
5913 }
5914}
5915
Roland Levillain44015862016-01-22 11:47:17 +00005916void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5917 Location out,
5918 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005919 DCHECK(kEmitCompilerReadBarrier);
5920
Roland Levillain44015862016-01-22 11:47:17 +00005921 // Insert a slow path based read barrier *after* the GC root load.
5922 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005923 // Note that GC roots are not affected by heap poisoning, so we do
5924 // not need to do anything special for this here.
5925 SlowPathCodeARM64* slow_path =
5926 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
5927 AddSlowPath(slow_path);
5928
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005929 __ B(slow_path->GetEntryLabel());
5930 __ Bind(slow_path->GetExitLabel());
5931}
5932
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005933void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
5934 LocationSummary* locations =
5935 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5936 locations->SetInAt(0, Location::RequiresRegister());
5937 locations->SetOut(Location::RequiresRegister());
5938}
5939
5940void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
5941 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00005942 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
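    // Load the method entry from the class's embedded vtable at the requested index.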
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005943 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005944 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005945 __ Ldr(XRegisterFrom(locations->Out()),
5946 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005947 } else {
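    // Load the ImTable pointer from the class, then the method from the requested IMT slot.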
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005948 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00005949 instruction->GetIndex(), kArm64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005950 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
5951 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005952 __ Ldr(XRegisterFrom(locations->Out()),
5953 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005954 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005955}
5956
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005957static void PatchJitRootUse(uint8_t* code,
5958 const uint8_t* roots_data,
5959 vixl::aarch64::Literal<uint32_t>* literal,
5960 uint64_t index_in_table) {
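  // Patch the 32-bit literal in the generated code so that it holds the address of this root's
  // slot in the JIT root table.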
5961 uint32_t literal_offset = literal->GetOffset();
5962 uintptr_t address =
5963 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
5964 uint8_t* data = code + literal_offset;
5965 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
5966}
5967
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005968void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
5969 for (const auto& entry : jit_string_patches_) {
5970 const auto& it = jit_string_roots_.find(entry.first);
5971 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005972 PatchJitRootUse(code, roots_data, entry.second, it->second);
5973 }
5974 for (const auto& entry : jit_class_patches_) {
5975 const auto& it = jit_class_roots_.find(entry.first);
5976 DCHECK(it != jit_class_roots_.end());
5977 PatchJitRootUse(code, roots_data, entry.second, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005978 }
5979}
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005980
Alexandre Rames67555f72014-11-18 10:55:16 +00005981#undef __
5982#undef QUICK_ENTRY_POINT
5983
Alexandre Rames5319def2014-10-23 10:03:10 +01005984} // namespace arm64
5985} // namespace art