/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm_vixl.h"

#include "arch/arm/asm_support_arm.h"
#include "arch/arm/instruction_set_features_arm.h"
#include "arch/arm/jni_frame_arm.h"
#include "art_method-inl.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_root-inl.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "common_arm.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "interpreter/mterp/nterp.h"
#include "intrinsics.h"
#include "intrinsics_arm_vixl.h"
#include "linker/linker_patch.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/var_handle.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"
#include "utils/arm/assembler_arm_vixl.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {
namespace arm {

namespace vixl32 = vixl::aarch32;
using namespace vixl32;  // NOLINT(build/namespaces)

using helpers::DRegisterFrom;
using helpers::HighRegisterFrom;
using helpers::InputDRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegister;
using helpers::InputRegisterAt;
using helpers::InputSRegisterAt;
using helpers::InputVRegister;
using helpers::InputVRegisterAt;
using helpers::Int32ConstantFrom;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::LowRegisterFrom;
using helpers::LowSRegisterFrom;
using helpers::OperandFrom;
using helpers::OutputRegister;
using helpers::OutputSRegister;
using helpers::OutputVRegister;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::Uint64ConstantFrom;

using vixl::EmissionCheckScope;
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

using RegisterList = vixl32::RegisterList;

static bool ExpectedPairLayout(Location location) {
  // We expect this for both core and FPU register pairs.
  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
}

// Use a local definition to prevent copying mistakes.
static constexpr size_t kArmWordSize = static_cast<size_t>(kArmPointerSize);
static constexpr size_t kArmBitsPerWord = kArmWordSize * kBitsPerByte;
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

// A reference load (except object array loads) uses LDR Rt, [Rn, #offset], which can handle
// offsets < 4KiB. For offsets >= 4KiB, the load must be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks we need to split
// the offset explicitly.
constexpr uint32_t kReferenceLoadMinFarOffset = 4 * KB;

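// A sketch of such a split (illustrative only; the exact sequence depends on the offset and
// the available temporaries): a far load such as `LDR r0, [r1, #0x1234]` may become
//   ADD ip, r1, #0x1000   // materialize the high part of the offset
//   LDR r0, [ip, #0x234]  // remaining low part stays within the 4KiB immediate range
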
// Using a base helps identify when we hit Marking Register check breakpoints.
constexpr int kMarkingRegisterCheckBreakCodeBaseCode = 0x10;

#ifdef __
#error "ARM Codegen VIXL macro-assembler macro already defined."
#endif

// NOLINT on __ macro to suppress an incorrect warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, x).Int32Value()

// Marker for code that is yet to be, and must be, implemented.
#define TODO_VIXL32(level) LOG(level) << __PRETTY_FUNCTION__ << " unimplemented "

static inline bool CanEmitNarrowLdr(vixl32::Register rt, vixl32::Register rn, uint32_t offset) {
  return rt.IsLow() && rn.IsLow() && offset < 32u;
}

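// Note on the bound above: narrow (16-bit) Thumb loads require both registers to be low
// (r0-r7) and take a 5-bit immediate — unscaled for LDRB (0-31), scaled by 2 for LDRH and
// by 4 for LDR — so `offset < 32u` is safe for all three forms given suitable alignment.
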
class EmitAdrCode {
 public:
  EmitAdrCode(ArmVIXLMacroAssembler* assembler, vixl32::Register rd, vixl32::Label* label)
      : assembler_(assembler), rd_(rd), label_(label) {
    DCHECK(!assembler->AllowMacroInstructions());  // In ExactAssemblyScope.
    adr_location_ = assembler->GetCursorOffset();
    assembler->adr(EncodingSize(Wide), rd, label);
  }

  ~EmitAdrCode() {
    DCHECK(label_->IsBound());
    // The ADR emitted by the assembler does not set the Thumb mode bit we need.
    // TODO: Maybe extend VIXL to allow ADR for return address?
    uint8_t* raw_adr = assembler_->GetBuffer()->GetOffsetAddress<uint8_t*>(adr_location_);
    // Expecting ADR encoding T3 with `(offset & 1) == 0`.
    DCHECK_EQ(raw_adr[1] & 0xfbu, 0xf2u);          // Check bits 24-31, except 26.
    DCHECK_EQ(raw_adr[0] & 0xffu, 0x0fu);          // Check bits 16-23.
    DCHECK_EQ(raw_adr[3] & 0x8fu, rd_.GetCode());  // Check bits 8-11 and 15.
    DCHECK_EQ(raw_adr[2] & 0x01u, 0x00u);          // Check bit 0, i.e. the `offset & 1`.
    // Add the Thumb mode bit.
    raw_adr[2] |= 0x01u;
  }

 private:
  ArmVIXLMacroAssembler* const assembler_;
  vixl32::Register rd_;
  vixl32::Label* const label_;
  int32_t adr_location_;
};

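// A minimal usage sketch (the names `scope_size` and `lbl` are illustrative, not from this
// file): the ADR must be emitted inside an ExactAssemblyScope so that no literal pools or
// veneers can be emitted in between:
//
//   ExactAssemblyScope aas(assembler, scope_size, CodeBufferCheckScope::kMaximumSize);
//   EmitAdrCode adr(assembler, lr, &lbl);  // Emits a wide ADR targeting `lbl`.
//   ...
//   // Once `lbl` is bound, ~EmitAdrCode() patches the Thumb mode bit into the encoding.
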
static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
  // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
  // that the kPrimNot result register is the same as the first argument register.
  return caller_saves;
}

// SaveLiveRegisters and RestoreLiveRegisters from SlowPathCodeARM operate on sets of S registers;
// for each live D register, they treat the two corresponding S registers as live.
//
// The two functions below (SaveContiguousSRegisterList, RestoreContiguousSRegisterList) build,
// from a list of contiguous S registers, a list of contiguous D registers (handling the
// first/last S register corner cases) and save/restore this new list treating them as
// D registers, thereby:
// - decreasing code size;
// - avoiding hazards on Cortex-A57, where a pair of S registers for an actual live D register is
//   restored and then used as a D register in regular non-slow-path code.
//
// For the following example (v means the S register is live):
//   D names: | D0   |   D1   |   D2   |   D3   | ...
//   S names: | S0 | S1 | S2 | S3 | S4 | S5 | S6 | S7 | ...
//   Live?    |    | v  | v  | v  | v  | v  | v  |    | ...
//
// S1 and S6 will be saved/restored independently; the D register list (D1, D2) will be processed
// as D registers.
//
// TODO(VIXL): All this code should be unnecessary once the VIXL AArch32 backend provides helpers
// for lists of floating-point registers.
static size_t SaveContiguousSRegisterList(size_t first,
                                          size_t last,
                                          CodeGenerator* codegen,
                                          size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool save_last = false;
  if (last % 2 == 0) {
    save_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;

    if (number_of_d_regs == 1) {
      __ Vstr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, Operand::From(stack_offset));
      }
      __ Vstm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (save_last) {
    __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

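// Traced on the example above (live S1-S6), SaveContiguousSRegisterList(1, 6, ...) emits:
//   VSTR S1, [SP, #off]   // odd first register stored on its own
//   VSTM base, {D1, D2}   // covers S2-S5 as two D registers
//   VSTR S6, [SP, #off']  // even last register stored on its own
// (a worked trace of the function above; stack offsets elided).
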
static size_t RestoreContiguousSRegisterList(size_t first,
                                             size_t last,
                                             CodeGenerator* codegen,
                                             size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vldr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vldr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool restore_last = false;
  if (last % 2 == 0) {
    restore_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;
    if (number_of_d_regs == 1) {
      __ Vldr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, Operand::From(stack_offset));
      }
      __ Vldm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (restore_last) {
    __ Vldr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

static LoadOperandType GetLoadOperandType(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
      return kLoadWord;
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      return kLoadUnsignedByte;
    case DataType::Type::kInt8:
      return kLoadSignedByte;
    case DataType::Type::kUint16:
      return kLoadUnsignedHalfword;
    case DataType::Type::kInt16:
      return kLoadSignedHalfword;
    case DataType::Type::kInt32:
      return kLoadWord;
    case DataType::Type::kInt64:
      return kLoadWordPair;
    case DataType::Type::kFloat32:
      return kLoadSWord;
    case DataType::Type::kFloat64:
      return kLoadDWord;
    default:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
}

static StoreOperandType GetStoreOperandType(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
      return kStoreWord;
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      return kStoreByte;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      return kStoreHalfword;
    case DataType::Type::kInt32:
      return kStoreWord;
    case DataType::Type::kInt64:
      return kStoreWordPair;
    case DataType::Type::kFloat32:
      return kStoreSWord;
    case DataType::Type::kFloat64:
      return kStoreDWord;
    default:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
}

void SlowPathCodeARMVIXL::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false);
  orig_offset = stack_offset;
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  stack_offset = orig_offset;
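  // Bit trick, traced on an example: for fp_spills = 0b01111100 (S2-S6 live), begin = 2;
  // tmp = fp_spills + (1 << 2) = 0b10000000, i.e. the carry ripples through and clears the
  // contiguous run of 1s; fp_spills &= tmp leaves 0; end = CTZ(tmp) = 7. So S2-S6 are saved
  // with a single SaveContiguousSRegisterList(2, 6, ...) call.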
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = SaveContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

void SlowPathCodeARMVIXL::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    stack_offset += kArmWordSize;
  }

  // TODO(VIXL): Check the coherency of stack_offset after this with a test.
  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false);
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = RestoreContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

class NullCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit NullCheckSlowPathARMVIXL(HNullCheck* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "NullCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARMVIXL);
};

class DivZeroCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DivZeroCheckSlowPathARMVIXL(HDivZeroCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "DivZeroCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARMVIXL);
};

class SuspendCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  SuspendCheckSlowPathARMVIXL(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARMVIXL(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm_codegen->GetLabelOf(successor_));
    }
  }

  vixl32::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathARMVIXL"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl32::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARMVIXL);
};

class BoundsCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit BoundsCheckSlowPathARMVIXL(HBoundsCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARMVIXL);
};

class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  LoadClassSlowPathARMVIXL(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeARMVIXL(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), arm_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ Mov(calling_convention.GetRegisterAt(0), type_index.index_);
      if (cls_->NeedsAccessCheck()) {
        CheckEntrypointTypes<kQuickResolveTypeAndVerifyAccess, void*, uint32_t>();
        arm_codegen->InvokeRuntime(kQuickResolveTypeAndVerifyAccess, instruction_, dex_pc, this);
      } else {
        CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
        arm_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      }
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      arm_codegen->Move32(LocationFrom(calling_convention.GetRegisterAt(0)), source);
    }
    if (must_do_clinit) {
      arm_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathARMVIXL"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARMVIXL);
};

class LoadStringSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit LoadStringSlowPathARMVIXL(HLoadString* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0), string_index.index_);
    arm_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARMVIXL);
};

class TypeCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  TypeCheckSlowPathARMVIXL(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARMVIXL(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConventionARMVIXL calling_convention;

    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(kQuickCheckInstanceOf,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathARMVIXL"; }

  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARMVIXL);
};

class DeoptimizationSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DeoptimizationSlowPathARMVIXL(HDeoptimize* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));

    arm_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARMVIXL);
};

class ArraySetSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit ArraySetSlowPathARMVIXL(HInstruction* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    arm_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARMVIXL);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  ReadBarrierForHeapReferenceSlowPathARMVIXL(HInstruction* instruction,
                                             Location out,
                                             Location ref,
                                             Location obj,
                                             uint32_t offset,
                                             Location index)
      : SlowPathCodeARMVIXL(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    vixl32::Register reg_out = RegisterFrom(out_);
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.GetCode()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvoke() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path), we
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        vixl32::Register index_reg = RegisterFrom(index_);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg.GetCode()));
        if (codegen->IsCoreCalleeSaveRegister(index_reg.GetCode())) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::arm::ArmVIXLMacroAssembler::Lsl and
          // art::arm::ArmVIXLMacroAssembler::Add below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          vixl32::Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg, index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        Intrinsics intrinsic = instruction_->AsInvoke()->GetIntrinsic();
        DCHECK(intrinsic == Intrinsics::kUnsafeGetObject ||
               intrinsic == Intrinsics::kUnsafeGetObjectVolatile ||
               mirror::VarHandle::GetAccessModeTemplateByIntrinsic(intrinsic) ==
                   mirror::VarHandle::AccessModeTemplate::kGet ||
               mirror::VarHandle::GetAccessModeTemplateByIntrinsic(intrinsic) ==
                   mirror::VarHandle::AccessModeTemplate::kCompareAndSet ||
               mirror::VarHandle::GetAccessModeTemplateByIntrinsic(intrinsic) ==
                   mirror::VarHandle::AccessModeTemplate::kCompareAndExchange ||
               mirror::VarHandle::GetAccessModeTemplateByIntrinsic(intrinsic) ==
                   mirror::VarHandle::AccessModeTemplate::kGetAndUpdate)
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        // Though UnsafeGet's offset location is a register pair, we only pass the low
        // part (high part is irrelevant for 32-bit addresses) to the slow path.
        // For VarHandle intrinsics, the index is always just a register.
        DCHECK(index_.IsRegister());
        index = index_;
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ Mov(calling_convention.GetRegisterAt(2), offset_);
    }
    arm_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm_codegen->Move32(out_, LocationFrom(r0));

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathARMVIXL";
  }

 private:
  vixl32::Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    uint32_t ref = RegisterFrom(ref_).GetCode();
    uint32_t obj = RegisterFrom(obj_).GetCode();
    for (uint32_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return vixl32::Register(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARMVIXL);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  ReadBarrierForRootSlowPathARMVIXL(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARMVIXL(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    vixl32::Register reg_out = RegisterFrom(out_);
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.GetCode()));
    DCHECK(instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           (instruction_->IsInvoke() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    arm_codegen->Move32(LocationFrom(calling_convention.GetRegisterAt(0)), root_);
    arm_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm_codegen->Move32(out_, LocationFrom(r0));

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathARMVIXL"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARMVIXL);
};

inline vixl32::Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition.
inline vixl32::Condition ARMUnsignedCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    // Signed to unsigned.
    case kCondLT: return lo;
    case kCondLE: return ls;
    case kCondGT: return hi;
    case kCondGE: return hs;
    // Unsigned conditions remain unchanged.
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

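// Note on the flag encoding used below (a clarifying summary of table A8-1): after
// VCMP + VMRS, N is set for "less than", Z (with C) for "equal", C alone for "greater
// than", and C with V for "unordered". So for kCondLT with gt_bias the mapping picks
// `cc` (C clear), which is false for NaN operands and pushes unordered results onto the
// "greater" side, whereas plain `lt` (N != V) is true for unordered results.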
inline vixl32::Condition ARMFPCondition(IfCondition cond, bool gt_bias) {
  // The ARM condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table A8-1 of the ARMv7 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

inline ShiftType ShiftFromOpKind(HDataProcWithShifterOp::OpKind op_kind) {
  switch (op_kind) {
    case HDataProcWithShifterOp::kASR: return ShiftType::ASR;
    case HDataProcWithShifterOp::kLSL: return ShiftType::LSL;
    case HDataProcWithShifterOp::kLSR: return ShiftType::LSR;
    default:
      LOG(FATAL) << "Unexpected op kind " << op_kind;
      UNREACHABLE();
  }
}

void CodeGeneratorARMVIXL::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << vixl32::Register(reg);
}

void CodeGeneratorARMVIXL::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << vixl32::SRegister(reg);
}

const ArmInstructionSetFeatures& CodeGeneratorARMVIXL::GetInstructionSetFeatures() const {
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsArmInstructionSetFeatures();
}

static uint32_t ComputeSRegisterListMask(const SRegisterList& regs) {
  uint32_t mask = 0;
  for (uint32_t i = regs.GetFirstSRegister().GetCode();
       i <= regs.GetLastSRegister().GetCode();
       ++i) {
    mask |= (1 << i);
  }
  return mask;
}
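// For example, a list spanning S2..S4 yields the mask 0b11100 (bits 2-4 set).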

// Saves the register in the stack. Returns the size taken on stack.
size_t CodeGeneratorARMVIXL::SaveCoreRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                              uint32_t reg_id ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
  UNREACHABLE();
}

// Restores the register from the stack. Returns the size taken on stack.
size_t CodeGeneratorARMVIXL::RestoreCoreRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                                 uint32_t reg_id ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
  UNREACHABLE();
}

size_t CodeGeneratorARMVIXL::SaveFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                                       uint32_t reg_id ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
  UNREACHABLE();
}

size_t CodeGeneratorARMVIXL::RestoreFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                                          uint32_t reg_id ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
  UNREACHABLE();
}

static void GenerateDataProcInstruction(HInstruction::InstructionKind kind,
                                        vixl32::Register out,
                                        vixl32::Register first,
                                        const Operand& second,
                                        CodeGeneratorARMVIXL* codegen) {
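  // When the second operand is the immediate 0, the operation folds to a move:
  // AND with 0 yields 0, while ADD/OR/SUB/XOR with 0 simply copy `first`.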
  if (second.IsImmediate() && second.GetImmediate() == 0) {
    const Operand in = kind == HInstruction::kAnd
        ? Operand(0)
        : Operand(first);

    __ Mov(out, in);
  } else {
    switch (kind) {
      case HInstruction::kAdd:
        __ Add(out, first, second);
        break;
      case HInstruction::kAnd:
        __ And(out, first, second);
        break;
      case HInstruction::kOr:
        __ Orr(out, first, second);
        break;
      case HInstruction::kSub:
        __ Sub(out, first, second);
        break;
      case HInstruction::kXor:
        __ Eor(out, first, second);
        break;
      default:
        LOG(FATAL) << "Unexpected instruction kind: " << kind;
        UNREACHABLE();
    }
  }
}

static void GenerateDataProc(HInstruction::InstructionKind kind,
                             const Location& out,
                             const Location& first,
                             const Operand& second_lo,
                             const Operand& second_hi,
                             CodeGeneratorARMVIXL* codegen) {
  const vixl32::Register first_hi = HighRegisterFrom(first);
  const vixl32::Register first_lo = LowRegisterFrom(first);
  const vixl32::Register out_hi = HighRegisterFrom(out);
  const vixl32::Register out_lo = LowRegisterFrom(out);

  if (kind == HInstruction::kAdd) {
    __ Adds(out_lo, first_lo, second_lo);
    __ Adc(out_hi, first_hi, second_hi);
  } else if (kind == HInstruction::kSub) {
    __ Subs(out_lo, first_lo, second_lo);
    __ Sbc(out_hi, first_hi, second_hi);
  } else {
    GenerateDataProcInstruction(kind, out_lo, first_lo, second_lo, codegen);
    GenerateDataProcInstruction(kind, out_hi, first_hi, second_hi, codegen);
  }
}

static Operand GetShifterOperand(vixl32::Register rm, ShiftType shift, uint32_t shift_imm) {
  return shift_imm == 0 ? Operand(rm) : Operand(rm, shift, shift_imm);
}

static void GenerateLongDataProc(HDataProcWithShifterOp* instruction,
                                 CodeGeneratorARMVIXL* codegen) {
  DCHECK_EQ(instruction->GetType(), DataType::Type::kInt64);
  DCHECK(HDataProcWithShifterOp::IsShiftOp(instruction->GetOpKind()));

  const LocationSummary* const locations = instruction->GetLocations();
  const uint32_t shift_value = instruction->GetShiftAmount();
  const HInstruction::InstructionKind kind = instruction->GetInstrKind();
  const Location first = locations->InAt(0);
  const Location second = locations->InAt(1);
  const Location out = locations->Out();
  const vixl32::Register first_hi = HighRegisterFrom(first);
  const vixl32::Register first_lo = LowRegisterFrom(first);
  const vixl32::Register out_hi = HighRegisterFrom(out);
  const vixl32::Register out_lo = LowRegisterFrom(out);
  const vixl32::Register second_hi = HighRegisterFrom(second);
  const vixl32::Register second_lo = LowRegisterFrom(second);
  const ShiftType shift = ShiftFromOpKind(instruction->GetOpKind());

  if (shift_value >= 32) {
    if (shift == ShiftType::LSL) {
      GenerateDataProcInstruction(kind,
                                  out_hi,
                                  first_hi,
                                  Operand(second_lo, ShiftType::LSL, shift_value - 32),
                                  codegen);
      GenerateDataProcInstruction(kind, out_lo, first_lo, 0, codegen);
    } else if (shift == ShiftType::ASR) {
      GenerateDataProc(kind,
                       out,
                       first,
                       GetShifterOperand(second_hi, ShiftType::ASR, shift_value - 32),
                       Operand(second_hi, ShiftType::ASR, 31),
                       codegen);
    } else {
      DCHECK_EQ(shift, ShiftType::LSR);
      GenerateDataProc(kind,
                       out,
                       first,
                       GetShifterOperand(second_hi, ShiftType::LSR, shift_value - 32),
                       0,
                       codegen);
    }
  } else {
    DCHECK_GT(shift_value, 1U);
    DCHECK_LT(shift_value, 32U);

    UseScratchRegisterScope temps(codegen->GetVIXLAssembler());

    if (shift == ShiftType::LSL) {
      // We are not doing this for HInstruction::kAdd because the output will require
      // Location::kOutputOverlap; not applicable to other cases.
      if (kind == HInstruction::kOr || kind == HInstruction::kXor) {
        GenerateDataProcInstruction(kind,
                                    out_hi,
                                    first_hi,
                                    Operand(second_hi, ShiftType::LSL, shift_value),
                                    codegen);
        GenerateDataProcInstruction(kind,
                                    out_hi,
                                    out_hi,
                                    Operand(second_lo, ShiftType::LSR, 32 - shift_value),
                                    codegen);
        GenerateDataProcInstruction(kind,
                                    out_lo,
                                    first_lo,
                                    Operand(second_lo, ShiftType::LSL, shift_value),
                                    codegen);
      } else {
        const vixl32::Register temp = temps.Acquire();

        __ Lsl(temp, second_hi, shift_value);
        __ Orr(temp, temp, Operand(second_lo, ShiftType::LSR, 32 - shift_value));
        GenerateDataProc(kind,
                         out,
                         first,
                         Operand(second_lo, ShiftType::LSL, shift_value),
                         temp,
                         codegen);
      }
    } else {
      DCHECK(shift == ShiftType::ASR || shift == ShiftType::LSR);

      // We are not doing this for HInstruction::kAdd because the output will require
      // Location::kOutputOverlap; not applicable to other cases.
      if (kind == HInstruction::kOr || kind == HInstruction::kXor) {
        GenerateDataProcInstruction(kind,
                                    out_lo,
                                    first_lo,
                                    Operand(second_lo, ShiftType::LSR, shift_value),
                                    codegen);
        GenerateDataProcInstruction(kind,
                                    out_lo,
                                    out_lo,
                                    Operand(second_hi, ShiftType::LSL, 32 - shift_value),
                                    codegen);
        GenerateDataProcInstruction(kind,
                                    out_hi,
                                    first_hi,
                                    Operand(second_hi, shift, shift_value),
                                    codegen);
      } else {
        const vixl32::Register temp = temps.Acquire();

        __ Lsr(temp, second_lo, shift_value);
        __ Orr(temp, temp, Operand(second_hi, ShiftType::LSL, 32 - shift_value));
        GenerateDataProc(kind,
                         out,
                         first,
                         temp,
                         Operand(second_hi, shift, shift_value),
                         codegen);
      }
    }
  }
}
1262
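// Illustrative sketch of the decomposition above for a small left shift
// (shift_value < 32), e.g. `out = first | (second << 5)` with kOr:
//
//   out_hi = first_hi | (second_hi << 5) | (second_lo >> 27)
//   out_lo = first_lo | (second_lo << 5)
//
// For shift_value >= 32 only the low input word feeds the high output word,
// e.g. `second << 37` contributes `second_lo << 5` to out_hi and nothing to
// out_lo. kAdd and kSub instead go through ADDS/ADC and SUBS/SBC above so
// that the carry propagates between the two halves.
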
Donghui Bai426b49c2016-11-08 14:55:38 +08001263static void GenerateVcmp(HInstruction* instruction, CodeGeneratorARMVIXL* codegen) {
1264 const Location rhs_loc = instruction->GetLocations()->InAt(1);
1265 if (rhs_loc.IsConstant()) {
1266 // 0.0 is the only immediate that can be encoded directly in
1267 // a VCMP instruction.
1268 //
1269 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
1270 // specify that in a floating-point comparison, positive zero
1271 // and negative zero are considered equal, so we can use the
1272 // literal 0.0 for both cases here.
1273 //
1274    // Note however that some methods (Float.equals, Float.compare,
1275    // Float.compareTo, Double.equals, Double.compare,
1276 // Double.compareTo, Math.max, Math.min, StrictMath.max,
1277 // StrictMath.min) consider 0.0 to be (strictly) greater than
1278 // -0.0. So if we ever translate calls to these methods into a
1279 // HCompare instruction, we must handle the -0.0 case with
1280 // care here.
1281 DCHECK(rhs_loc.GetConstant()->IsArithmeticZero());
1282
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001283 const DataType::Type type = instruction->InputAt(0)->GetType();
Donghui Bai426b49c2016-11-08 14:55:38 +08001284
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001285 if (type == DataType::Type::kFloat32) {
Donghui Bai426b49c2016-11-08 14:55:38 +08001286 __ Vcmp(F32, InputSRegisterAt(instruction, 0), 0.0);
1287 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001288 DCHECK_EQ(type, DataType::Type::kFloat64);
Donghui Bai426b49c2016-11-08 14:55:38 +08001289 __ Vcmp(F64, InputDRegisterAt(instruction, 0), 0.0);
1290 }
1291 } else {
1292 __ Vcmp(InputVRegisterAt(instruction, 0), InputVRegisterAt(instruction, 1));
1293 }
1294}
1295
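// For reference, the asymmetry mentioned in the comment above is observable
// in Java (illustrative, not generated code):
//
//   0.0 == -0.0                    // true: IEEE 754 equality.
//   Double.compare(0.0, -0.0) > 0  // true: the total order has 0.0 > -0.0.
//
// The constant path above is safe because it only accepts arithmetic zero,
// as asserted by the IsArithmeticZero() DCHECK.
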
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001296static int64_t AdjustConstantForCondition(int64_t value,
1297 IfCondition* condition,
1298 IfCondition* opposite) {
1299 if (value == 1) {
1300 if (*condition == kCondB) {
1301 value = 0;
1302 *condition = kCondEQ;
1303 *opposite = kCondNE;
1304 } else if (*condition == kCondAE) {
1305 value = 0;
1306 *condition = kCondNE;
1307 *opposite = kCondEQ;
1308 }
1309 } else if (value == -1) {
1310 if (*condition == kCondGT) {
1311 value = 0;
1312 *condition = kCondGE;
1313 *opposite = kCondLT;
1314 } else if (*condition == kCondLE) {
1315 value = 0;
1316 *condition = kCondLT;
1317 *opposite = kCondGE;
1318 }
1319 }
1320
1321 return value;
1322}
1323
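// The rewrites performed by AdjustConstantForCondition() are, illustratively
// (`<u`/`>=u` denote unsigned comparisons):
//
//   x <u 1   ->  x == 0   (kCondB,  1  ->  kCondEQ, 0)
//   x >=u 1  ->  x != 0   (kCondAE, 1  ->  kCondNE, 0)
//   x > -1   ->  x >= 0   (kCondGT, -1 ->  kCondGE, 0)
//   x <= -1  ->  x < 0    (kCondLE, -1 ->  kCondLT, 0)
//
// Comparing against 0 then enables the cheaper sequences in the helpers
// below.
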
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001324static std::pair<vixl32::Condition, vixl32::Condition> GenerateLongTestConstant(
1325 HCondition* condition,
1326 bool invert,
1327 CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001328 DCHECK_EQ(condition->GetLeft()->GetType(), DataType::Type::kInt64);
Donghui Bai426b49c2016-11-08 14:55:38 +08001329
1330 const LocationSummary* const locations = condition->GetLocations();
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001331 IfCondition cond = condition->GetCondition();
1332 IfCondition opposite = condition->GetOppositeCondition();
1333
1334 if (invert) {
1335 std::swap(cond, opposite);
1336 }
1337
1338 std::pair<vixl32::Condition, vixl32::Condition> ret(eq, ne);
Donghui Bai426b49c2016-11-08 14:55:38 +08001339 const Location left = locations->InAt(0);
1340 const Location right = locations->InAt(1);
1341
1342 DCHECK(right.IsConstant());
1343
1344 const vixl32::Register left_high = HighRegisterFrom(left);
1345 const vixl32::Register left_low = LowRegisterFrom(left);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001346 int64_t value = AdjustConstantForCondition(Int64ConstantFrom(right), &cond, &opposite);
1347 UseScratchRegisterScope temps(codegen->GetVIXLAssembler());
1348
1349 // Comparisons against 0 are common enough to deserve special attention.
1350 if (value == 0) {
1351 switch (cond) {
1352 case kCondNE:
1353 // x > 0 iff x != 0 when the comparison is unsigned.
1354 case kCondA:
1355 ret = std::make_pair(ne, eq);
1356 FALLTHROUGH_INTENDED;
1357 case kCondEQ:
1358 // x <= 0 iff x == 0 when the comparison is unsigned.
1359 case kCondBE:
1360 __ Orrs(temps.Acquire(), left_low, left_high);
1361 return ret;
1362 case kCondLT:
1363 case kCondGE:
1364 __ Cmp(left_high, 0);
1365 return std::make_pair(ARMCondition(cond), ARMCondition(opposite));
1366 // Trivially true or false.
1367 case kCondB:
1368 ret = std::make_pair(ne, eq);
1369 FALLTHROUGH_INTENDED;
1370 case kCondAE:
1371 __ Cmp(left_low, left_low);
1372 return ret;
1373 default:
1374 break;
1375 }
1376 }
Donghui Bai426b49c2016-11-08 14:55:38 +08001377
1378 switch (cond) {
1379 case kCondEQ:
1380 case kCondNE:
1381 case kCondB:
1382 case kCondBE:
1383 case kCondA:
1384 case kCondAE: {
Anton Kirilov23b752b2017-07-20 14:40:44 +01001385 const uint32_t value_low = Low32Bits(value);
1386 Operand operand_low(value_low);
1387
Donghui Bai426b49c2016-11-08 14:55:38 +08001388 __ Cmp(left_high, High32Bits(value));
1389
Anton Kirilov23b752b2017-07-20 14:40:44 +01001390 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
1391 // we must ensure that the operands corresponding to the least significant
1392 // halves of the inputs fit into a 16-bit CMP encoding.
1393 if (!left_low.IsLow() || !IsUint<8>(value_low)) {
1394 operand_low = Operand(temps.Acquire());
1395 __ Mov(LeaveFlags, operand_low.GetBaseRegister(), value_low);
1396 }
1397
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001398 // We use the scope because of the IT block that follows.
Donghui Bai426b49c2016-11-08 14:55:38 +08001399 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1400 2 * vixl32::k16BitT32InstructionSizeInBytes,
1401 CodeBufferCheckScope::kExactSize);
1402
1403 __ it(eq);
Anton Kirilov23b752b2017-07-20 14:40:44 +01001404 __ cmp(eq, left_low, operand_low);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001405 ret = std::make_pair(ARMUnsignedCondition(cond), ARMUnsignedCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001406 break;
1407 }
1408 case kCondLE:
1409 case kCondGT:
1410 // Trivially true or false.
1411 if (value == std::numeric_limits<int64_t>::max()) {
1412 __ Cmp(left_low, left_low);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001413 ret = cond == kCondLE ? std::make_pair(eq, ne) : std::make_pair(ne, eq);
Donghui Bai426b49c2016-11-08 14:55:38 +08001414 break;
1415 }
1416
1417 if (cond == kCondLE) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001418 DCHECK_EQ(opposite, kCondGT);
Donghui Bai426b49c2016-11-08 14:55:38 +08001419 cond = kCondLT;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001420 opposite = kCondGE;
Donghui Bai426b49c2016-11-08 14:55:38 +08001421 } else {
1422 DCHECK_EQ(cond, kCondGT);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001423 DCHECK_EQ(opposite, kCondLE);
Donghui Bai426b49c2016-11-08 14:55:38 +08001424 cond = kCondGE;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001425 opposite = kCondLT;
Donghui Bai426b49c2016-11-08 14:55:38 +08001426 }
1427
1428 value++;
1429 FALLTHROUGH_INTENDED;
1430 case kCondGE:
1431 case kCondLT: {
Donghui Bai426b49c2016-11-08 14:55:38 +08001432 __ Cmp(left_low, Low32Bits(value));
1433 __ Sbcs(temps.Acquire(), left_high, High32Bits(value));
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001434 ret = std::make_pair(ARMCondition(cond), ARMCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001435 break;
1436 }
1437 default:
1438 LOG(FATAL) << "Unreachable";
1439 UNREACHABLE();
1440 }
1441
1442 return ret;
1443}
1444
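// Two idioms in GenerateLongTestConstant() deserve a note (illustrative,
// for a constant C):
//
// 1) For EQ/NE and the unsigned conditions, the 64-bit compare is
//
//      CMP   left_high, #High32Bits(C)
//      IT    EQ
//      CMPEQ left_low,  #Low32Bits(C)
//
//    i.e. the low halves are compared only when the high halves are equal,
//    so the final flags describe the whole 64-bit comparison.
//
// 2) For signed LT/GE, `left - C` is computed purely for its flags:
//
//      CMP   left_low,  #Low32Bits(C)
//      SBCS  temp, left_high, #High32Bits(C)
//
//    leaving N and V describing the sign of the 64-bit difference.
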
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001445static std::pair<vixl32::Condition, vixl32::Condition> GenerateLongTest(
1446 HCondition* condition,
1447 bool invert,
1448 CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001449 DCHECK_EQ(condition->GetLeft()->GetType(), DataType::Type::kInt64);
Donghui Bai426b49c2016-11-08 14:55:38 +08001450
1451 const LocationSummary* const locations = condition->GetLocations();
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001452 IfCondition cond = condition->GetCondition();
1453 IfCondition opposite = condition->GetOppositeCondition();
1454
1455 if (invert) {
1456 std::swap(cond, opposite);
1457 }
1458
1459 std::pair<vixl32::Condition, vixl32::Condition> ret(eq, ne);
Donghui Bai426b49c2016-11-08 14:55:38 +08001460 Location left = locations->InAt(0);
1461 Location right = locations->InAt(1);
1462
1463 DCHECK(right.IsRegisterPair());
1464
1465 switch (cond) {
1466 case kCondEQ:
1467 case kCondNE:
1468 case kCondB:
1469 case kCondBE:
1470 case kCondA:
1471 case kCondAE: {
1472 __ Cmp(HighRegisterFrom(left), HighRegisterFrom(right));
1473
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001474 // We use the scope because of the IT block that follows.
Donghui Bai426b49c2016-11-08 14:55:38 +08001475 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1476 2 * vixl32::k16BitT32InstructionSizeInBytes,
1477 CodeBufferCheckScope::kExactSize);
1478
1479 __ it(eq);
1480 __ cmp(eq, LowRegisterFrom(left), LowRegisterFrom(right));
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001481 ret = std::make_pair(ARMUnsignedCondition(cond), ARMUnsignedCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001482 break;
1483 }
1484 case kCondLE:
1485 case kCondGT:
1486 if (cond == kCondLE) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001487 DCHECK_EQ(opposite, kCondGT);
Donghui Bai426b49c2016-11-08 14:55:38 +08001488 cond = kCondGE;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001489 opposite = kCondLT;
Donghui Bai426b49c2016-11-08 14:55:38 +08001490 } else {
1491 DCHECK_EQ(cond, kCondGT);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001492 DCHECK_EQ(opposite, kCondLE);
Donghui Bai426b49c2016-11-08 14:55:38 +08001493 cond = kCondLT;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001494 opposite = kCondGE;
Donghui Bai426b49c2016-11-08 14:55:38 +08001495 }
1496
1497 std::swap(left, right);
1498 FALLTHROUGH_INTENDED;
1499 case kCondGE:
1500 case kCondLT: {
1501 UseScratchRegisterScope temps(codegen->GetVIXLAssembler());
1502
1503 __ Cmp(LowRegisterFrom(left), LowRegisterFrom(right));
1504 __ Sbcs(temps.Acquire(), HighRegisterFrom(left), HighRegisterFrom(right));
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001505 ret = std::make_pair(ARMCondition(cond), ARMCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001506 break;
1507 }
1508 default:
1509 LOG(FATAL) << "Unreachable";
1510 UNREACHABLE();
1511 }
1512
1513 return ret;
1514}
1515
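// Note on the LE/GT case above: with both operands in registers there is no
// constant to adjust, so the comparison is flipped instead; `a <= b` becomes
// `b >= a` by swapping the operands, which reuses the LT/GE flag-setting
// sequence (CMP on the low halves, SBCS on the high halves).
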
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001516static std::pair<vixl32::Condition, vixl32::Condition> GenerateTest(HCondition* condition,
1517 bool invert,
1518 CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001519 const DataType::Type type = condition->GetLeft()->GetType();
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001520 IfCondition cond = condition->GetCondition();
1521 IfCondition opposite = condition->GetOppositeCondition();
1522 std::pair<vixl32::Condition, vixl32::Condition> ret(eq, ne);
Donghui Bai426b49c2016-11-08 14:55:38 +08001523
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001524 if (invert) {
1525 std::swap(cond, opposite);
1526 }
Donghui Bai426b49c2016-11-08 14:55:38 +08001527
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001528 if (type == DataType::Type::kInt64) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001529 ret = condition->GetLocations()->InAt(1).IsConstant()
1530 ? GenerateLongTestConstant(condition, invert, codegen)
1531 : GenerateLongTest(condition, invert, codegen);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001532 } else if (DataType::IsFloatingPointType(type)) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001533 GenerateVcmp(condition, codegen);
1534 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
1535 ret = std::make_pair(ARMFPCondition(cond, condition->IsGtBias()),
1536 ARMFPCondition(opposite, condition->IsGtBias()));
Donghui Bai426b49c2016-11-08 14:55:38 +08001537 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001538 DCHECK(DataType::IsIntegralType(type) || type == DataType::Type::kReference) << type;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001539 __ Cmp(InputRegisterAt(condition, 0), InputOperandAt(condition, 1));
1540 ret = std::make_pair(ARMCondition(cond), ARMCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001541 }
1542
1543 return ret;
1544}
1545
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001546static void GenerateConditionGeneric(HCondition* cond, CodeGeneratorARMVIXL* codegen) {
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001547 const vixl32::Register out = OutputRegister(cond);
1548 const auto condition = GenerateTest(cond, false, codegen);
1549
1550 __ Mov(LeaveFlags, out, 0);
1551
1552 if (out.IsLow()) {
1553 // We use the scope because of the IT block that follows.
1554 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1555 2 * vixl32::k16BitT32InstructionSizeInBytes,
1556 CodeBufferCheckScope::kExactSize);
1557
1558 __ it(condition.first);
1559 __ mov(condition.first, out, 1);
1560 } else {
1561 vixl32::Label done_label;
1562 vixl32::Label* const final_label = codegen->GetFinalLabel(cond, &done_label);
1563
Andreas Gampe3db70682018-12-26 15:12:03 -08001564 __ B(condition.second, final_label, /* is_far_target= */ false);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001565 __ Mov(out, 1);
1566
1567 if (done_label.IsReferenced()) {
1568 __ Bind(&done_label);
1569 }
1570 }
1571}
1572
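// For a low `out` register the materialization above is an IT-based,
// branchless sequence, e.g. for a signed less-than (illustrative):
//
//   <test sets the flags>
//   MOV   out, #0     ; LeaveFlags, so the flags survive
//   IT    LT
//   MOVLT out, #1
//
// For a high register there is no 16-bit conditional MOV encoding, so a
// forward branch over `MOV out, #1` is emitted instead.
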
1573static void GenerateEqualLong(HCondition* cond, CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001574 DCHECK_EQ(cond->GetLeft()->GetType(), DataType::Type::kInt64);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001575
1576 const LocationSummary* const locations = cond->GetLocations();
1577 IfCondition condition = cond->GetCondition();
1578 const vixl32::Register out = OutputRegister(cond);
1579 const Location left = locations->InAt(0);
1580 const Location right = locations->InAt(1);
1581 vixl32::Register left_high = HighRegisterFrom(left);
1582 vixl32::Register left_low = LowRegisterFrom(left);
1583 vixl32::Register temp;
1584 UseScratchRegisterScope temps(codegen->GetVIXLAssembler());
1585
1586 if (right.IsConstant()) {
1587 IfCondition opposite = cond->GetOppositeCondition();
1588 const int64_t value = AdjustConstantForCondition(Int64ConstantFrom(right),
1589 &condition,
1590 &opposite);
1591 Operand right_high = High32Bits(value);
1592 Operand right_low = Low32Bits(value);
1593
1594 // The output uses Location::kNoOutputOverlap.
1595 if (out.Is(left_high)) {
1596 std::swap(left_low, left_high);
1597 std::swap(right_low, right_high);
1598 }
1599
1600 __ Sub(out, left_low, right_low);
1601 temp = temps.Acquire();
1602 __ Sub(temp, left_high, right_high);
1603 } else {
1604 DCHECK(right.IsRegisterPair());
1605 temp = temps.Acquire();
1606 __ Sub(temp, left_high, HighRegisterFrom(right));
1607 __ Sub(out, left_low, LowRegisterFrom(right));
1608 }
1609
1610 // Need to check after calling AdjustConstantForCondition().
1611 DCHECK(condition == kCondEQ || condition == kCondNE) << condition;
1612
1613 if (condition == kCondNE && out.IsLow()) {
1614 __ Orrs(out, out, temp);
1615
1616 // We use the scope because of the IT block that follows.
1617 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1618 2 * vixl32::k16BitT32InstructionSizeInBytes,
1619 CodeBufferCheckScope::kExactSize);
1620
1621 __ it(ne);
1622 __ mov(ne, out, 1);
1623 } else {
1624 __ Orr(out, out, temp);
1625 codegen->GenerateConditionWithZero(condition, out, out, temp);
1626 }
1627}
1628
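// GenerateEqualLong() relies on the identity (sketched for 64-bit a, b):
//
//   a == b  <=>  ((a_lo - b_lo) | (a_hi - b_hi)) == 0
//
// The two half-differences are computed without setting flags; only the NE
// fast path needs flags, via ORRS. The low/high swap when `out` aliases
// left_high keeps the first SUB from clobbering an input that is still
// needed by the second one.
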
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001629static void GenerateConditionLong(HCondition* cond, CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001630 DCHECK_EQ(cond->GetLeft()->GetType(), DataType::Type::kInt64);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001631
1632 const LocationSummary* const locations = cond->GetLocations();
1633 IfCondition condition = cond->GetCondition();
1634 const vixl32::Register out = OutputRegister(cond);
1635 const Location left = locations->InAt(0);
1636 const Location right = locations->InAt(1);
1637
1638 if (right.IsConstant()) {
1639 IfCondition opposite = cond->GetOppositeCondition();
1640
1641 // Comparisons against 0 are common enough to deserve special attention.
1642 if (AdjustConstantForCondition(Int64ConstantFrom(right), &condition, &opposite) == 0) {
1643 switch (condition) {
1644 case kCondNE:
1645 case kCondA:
1646 if (out.IsLow()) {
1647          // We only care whether both input registers are 0 or not.
1648 __ Orrs(out, LowRegisterFrom(left), HighRegisterFrom(left));
1649
1650 // We use the scope because of the IT block that follows.
1651 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1652 2 * vixl32::k16BitT32InstructionSizeInBytes,
1653 CodeBufferCheckScope::kExactSize);
1654
1655 __ it(ne);
1656 __ mov(ne, out, 1);
1657 return;
1658 }
1659
1660 FALLTHROUGH_INTENDED;
1661 case kCondEQ:
1662 case kCondBE:
1663        // We only care whether both input registers are 0 or not.
1664 __ Orr(out, LowRegisterFrom(left), HighRegisterFrom(left));
1665 codegen->GenerateConditionWithZero(condition, out, out);
1666 return;
1667 case kCondLT:
1668 case kCondGE:
1669 // We only care about the sign bit.
1670 FALLTHROUGH_INTENDED;
1671 case kCondAE:
1672 case kCondB:
1673 codegen->GenerateConditionWithZero(condition, out, HighRegisterFrom(left));
1674 return;
1675 case kCondLE:
1676 case kCondGT:
1677 default:
1678 break;
1679 }
1680 }
1681 }
1682
Anton Kirilov23b752b2017-07-20 14:40:44 +01001683 // If `out` is a low register, then the GenerateConditionGeneric()
1684 // function generates a shorter code sequence that is still branchless.
1685 if ((condition == kCondEQ || condition == kCondNE) && !out.IsLow()) {
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001686 GenerateEqualLong(cond, codegen);
1687 return;
1688 }
1689
Anton Kirilov23b752b2017-07-20 14:40:44 +01001690 GenerateConditionGeneric(cond, codegen);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001691}
1692
Roland Levillain6d729a72017-06-30 18:34:01 +01001693static void GenerateConditionIntegralOrNonPrimitive(HCondition* cond,
1694 CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001695 const DataType::Type type = cond->GetLeft()->GetType();
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001696
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001697 DCHECK(DataType::IsIntegralType(type) || type == DataType::Type::kReference) << type;
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001698
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001699 if (type == DataType::Type::kInt64) {
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001700 GenerateConditionLong(cond, codegen);
1701 return;
1702 }
1703
1704 IfCondition condition = cond->GetCondition();
1705 vixl32::Register in = InputRegisterAt(cond, 0);
1706 const vixl32::Register out = OutputRegister(cond);
1707 const Location right = cond->GetLocations()->InAt(1);
1708 int64_t value;
1709
1710 if (right.IsConstant()) {
1711 IfCondition opposite = cond->GetOppositeCondition();
1712
1713 value = AdjustConstantForCondition(Int64ConstantFrom(right), &condition, &opposite);
1714
1715 // Comparisons against 0 are common enough to deserve special attention.
1716 if (value == 0) {
1717 switch (condition) {
1718 case kCondNE:
1719 case kCondA:
1720 if (out.IsLow() && out.Is(in)) {
1721 __ Cmp(out, 0);
1722
1723 // We use the scope because of the IT block that follows.
1724 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1725 2 * vixl32::k16BitT32InstructionSizeInBytes,
1726 CodeBufferCheckScope::kExactSize);
1727
1728 __ it(ne);
1729 __ mov(ne, out, 1);
1730 return;
1731 }
1732
1733 FALLTHROUGH_INTENDED;
1734 case kCondEQ:
1735 case kCondBE:
1736 case kCondLT:
1737 case kCondGE:
1738 case kCondAE:
1739 case kCondB:
1740 codegen->GenerateConditionWithZero(condition, out, in);
1741 return;
1742 case kCondLE:
1743 case kCondGT:
1744 default:
1745 break;
1746 }
1747 }
1748 }
1749
1750 if (condition == kCondEQ || condition == kCondNE) {
1751 Operand operand(0);
1752
1753 if (right.IsConstant()) {
1754 operand = Operand::From(value);
1755 } else if (out.Is(RegisterFrom(right))) {
1756 // Avoid 32-bit instructions if possible.
1757 operand = InputOperandAt(cond, 0);
1758 in = RegisterFrom(right);
1759 } else {
1760 operand = InputOperandAt(cond, 1);
1761 }
1762
1763 if (condition == kCondNE && out.IsLow()) {
1764 __ Subs(out, in, operand);
1765
1766 // We use the scope because of the IT block that follows.
1767 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1768 2 * vixl32::k16BitT32InstructionSizeInBytes,
1769 CodeBufferCheckScope::kExactSize);
1770
1771 __ it(ne);
1772 __ mov(ne, out, 1);
1773 } else {
1774 __ Sub(out, in, operand);
1775 codegen->GenerateConditionWithZero(condition, out, out);
1776 }
1777
1778 return;
1779 }
1780
1781 GenerateConditionGeneric(cond, codegen);
1782}
1783
Donghui Bai426b49c2016-11-08 14:55:38 +08001784static bool CanEncodeConstantAs8BitImmediate(HConstant* constant) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001785 const DataType::Type type = constant->GetType();
Donghui Bai426b49c2016-11-08 14:55:38 +08001786 bool ret = false;
1787
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001788 DCHECK(DataType::IsIntegralType(type) || type == DataType::Type::kReference) << type;
Donghui Bai426b49c2016-11-08 14:55:38 +08001789
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001790 if (type == DataType::Type::kInt64) {
Donghui Bai426b49c2016-11-08 14:55:38 +08001791 const uint64_t value = Uint64ConstantFrom(constant);
1792
1793 ret = IsUint<8>(Low32Bits(value)) && IsUint<8>(High32Bits(value));
1794 } else {
1795 ret = IsUint<8>(Int32ConstantFrom(constant));
1796 }
1797
1798 return ret;
1799}
1800
1801static Location Arm8BitEncodableConstantOrRegister(HInstruction* constant) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001802 DCHECK(!DataType::IsFloatingPointType(constant->GetType()));
Donghui Bai426b49c2016-11-08 14:55:38 +08001803
1804 if (constant->IsConstant() && CanEncodeConstantAs8BitImmediate(constant->AsConstant())) {
1805 return Location::ConstantLocation(constant->AsConstant());
1806 }
1807
1808 return Location::RequiresRegister();
1809}
1810
1811static bool CanGenerateConditionalMove(const Location& out, const Location& src) {
1812 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
1813 // we check that we are not dealing with floating-point output (there is no
1814 // 16-bit VMOV encoding).
1815 if (!out.IsRegister() && !out.IsRegisterPair()) {
1816 return false;
1817 }
1818
1819 // For constants, we also check that the output is in one or two low registers,
1820 // and that the constants fit in an 8-bit unsigned integer, so that a 16-bit
1821 // MOV encoding can be used.
1822 if (src.IsConstant()) {
1823 if (!CanEncodeConstantAs8BitImmediate(src.GetConstant())) {
1824 return false;
1825 }
1826
1827 if (out.IsRegister()) {
1828 if (!RegisterFrom(out).IsLow()) {
1829 return false;
1830 }
1831 } else {
1832 DCHECK(out.IsRegisterPair());
1833
1834 if (!HighRegisterFrom(out).IsLow()) {
1835 return false;
1836 }
1837 }
1838 }
1839
1840 return true;
1841}
1842
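// What CanGenerateConditionalMove() guards, illustratively: inside an IT
// block only 16-bit encodings are desirable, and the 16-bit MOV (T1)
// immediate form can only write an 8-bit value (0-255) into a low register
// (R0-R7). Hence, for a constant source:
//
//   out = r3, src = #200  ->  accepted (IT + 16-bit conditional MOV)
//   out = r3, src = #256  ->  rejected (would need a 32-bit MOV encoding)
//   out = r8, src = #1    ->  rejected (high destination register)
//   out in an FP register ->  rejected (no 16-bit VMOV encoding at all)
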
Scott Wakelingfe885462016-09-22 10:24:38 +01001843#undef __
1844
Donghui Bai426b49c2016-11-08 14:55:38 +08001845vixl32::Label* CodeGeneratorARMVIXL::GetFinalLabel(HInstruction* instruction,
1846 vixl32::Label* final_label) {
1847 DCHECK(!instruction->IsControlFlow() && !instruction->IsSuspendCheck());
Anton Kirilov6f644202017-02-27 18:29:45 +00001848 DCHECK(!instruction->IsInvoke() || !instruction->GetLocations()->CanCall());
Donghui Bai426b49c2016-11-08 14:55:38 +08001849
1850 const HBasicBlock* const block = instruction->GetBlock();
1851 const HLoopInformation* const info = block->GetLoopInformation();
1852 HInstruction* const next = instruction->GetNext();
1853
1854 // Avoid a branch to a branch.
1855 if (next->IsGoto() && (info == nullptr ||
1856 !info->IsBackEdge(*block) ||
1857 !info->HasSuspendCheck())) {
1858 final_label = GetLabelOf(next->AsGoto()->GetSuccessor());
1859 }
1860
1861 return final_label;
1862}
1863
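// Illustrative effect of GetFinalLabel(): if the instruction's block ends in
// a goto to `successor`, then instead of
//
//   B done         ; local final label
//   done:
//   B successor    ; the block's goto
//
// callers branch straight to `successor`'s label. Back edges with a suspend
// check are excluded, since the suspend check must still execute.
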
Scott Wakelingfe885462016-09-22 10:24:38 +01001864CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
Scott Wakelingfe885462016-09-22 10:24:38 +01001865 const CompilerOptions& compiler_options,
1866 OptimizingCompilerStats* stats)
1867 : CodeGenerator(graph,
1868 kNumberOfCoreRegisters,
1869 kNumberOfSRegisters,
1870 kNumberOfRegisterPairs,
1871 kCoreCalleeSaves.GetList(),
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001872 ComputeSRegisterListMask(kFpuCalleeSaves),
Scott Wakelingfe885462016-09-22 10:24:38 +01001873 compiler_options,
1874 stats),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001875 block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1876 jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Scott Wakelingfe885462016-09-22 10:24:38 +01001877 location_builder_(graph, this),
1878 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001879 move_resolver_(graph->GetAllocator(), this),
1880 assembler_(graph->GetAllocator()),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001881 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001882 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001883 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001884 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko8f63f102020-09-28 12:10:28 +01001885 public_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1886 package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001887 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001888 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko2d06e022019-07-08 15:45:19 +01001889 boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markof6675082019-05-17 12:05:28 +01001890 call_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001891 baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markof6675082019-05-17 12:05:28 +01001892 uint32_literals_(std::less<uint32_t>(),
1893 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Artem Serovc5fcb442016-12-02 19:19:58 +00001894 jit_string_patches_(StringReferenceValueComparator(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001895 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Artem Serovc5fcb442016-12-02 19:19:58 +00001896 jit_class_patches_(TypeReferenceValueComparator(),
Vladimir Marko966b46f2018-08-03 10:20:19 +00001897 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1898 jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
1899 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Scott Wakelingfe885462016-09-22 10:24:38 +01001900 // Always save the LR register to mimic Quick.
1901 AddAllocatedRegister(Location::RegisterLocation(LR));
Nicolas Geoffray13a797b2017-03-15 16:41:31 +00001902 // Give D30 and D31 as scratch register to VIXL. The register allocator only works on
1903 // S0-S31, which alias to D0-D15.
1904 GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d31);
1905 GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d30);
Scott Wakelingfe885462016-09-22 10:24:38 +01001906}
1907
Artem Serov551b28f2016-10-18 19:11:30 +01001908void JumpTableARMVIXL::EmitTable(CodeGeneratorARMVIXL* codegen) {
1909 uint32_t num_entries = switch_instr_->GetNumEntries();
1910 DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);
1911
1912  // We are about to use the assembler to place literals directly. Make sure we have enough
Scott Wakelingb77051e2016-11-21 19:46:00 +00001913  // space in the underlying code buffer and that we have generated a jump table of the right
1914  // size, using codegen->GetVIXLAssembler()->GetBuffer().Align() for alignment when needed.
Artem Serov0fb37192016-12-06 18:13:40 +00001915 ExactAssemblyScope aas(codegen->GetVIXLAssembler(),
1916 num_entries * sizeof(int32_t),
1917 CodeBufferCheckScope::kMaximumSize);
Artem Serov551b28f2016-10-18 19:11:30 +01001918 // TODO(VIXL): Check that using lower case bind is fine here.
1919 codegen->GetVIXLAssembler()->bind(&table_start_);
Artem Serov09a940d2016-11-11 16:15:11 +00001920 for (uint32_t i = 0; i < num_entries; i++) {
1921 codegen->GetVIXLAssembler()->place(bb_addresses_[i].get());
1922 }
1923}
1924
1925void JumpTableARMVIXL::FixTable(CodeGeneratorARMVIXL* codegen) {
1926 uint32_t num_entries = switch_instr_->GetNumEntries();
1927 DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);
1928
Artem Serov551b28f2016-10-18 19:11:30 +01001929 const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
1930 for (uint32_t i = 0; i < num_entries; i++) {
1931 vixl32::Label* target_label = codegen->GetLabelOf(successors[i]);
1932 DCHECK(target_label->IsBound());
1933 int32_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
1934    // When doing a BX to an address in T32 state, the low bit of the target must be set to 1.
1935 if (codegen->GetVIXLAssembler()->IsUsingT32()) {
1936 jump_offset++;
1937 }
1938 DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
1939 DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
Artem Serov09a940d2016-11-11 16:15:11 +00001940
Scott Wakelingb77051e2016-11-21 19:46:00 +00001941 bb_addresses_[i].get()->UpdateValue(jump_offset, codegen->GetVIXLAssembler()->GetBuffer());
Artem Serov551b28f2016-10-18 19:11:30 +01001942 }
1943}
1944
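// About the `jump_offset++` above (illustrative): each entry is an offset
// that the dispatch code adds to the (even, 2-byte aligned) table address
// before branching to the result with BX. In T32 state a BX target must have
// bit 0 set to 1, and since both label locations are even the offset itself
// is even, so incrementing it by one sets exactly that interworking bit.
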
Artem Serov09a940d2016-11-11 16:15:11 +00001945void CodeGeneratorARMVIXL::FixJumpTables() {
Artem Serov551b28f2016-10-18 19:11:30 +01001946 for (auto&& jump_table : jump_tables_) {
Artem Serov09a940d2016-11-11 16:15:11 +00001947 jump_table->FixTable(this);
Artem Serov551b28f2016-10-18 19:11:30 +01001948 }
1949}
1950
Andreas Gampeca620d72016-11-08 08:09:33 -08001951#define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()-> // NOLINT
Scott Wakelingfe885462016-09-22 10:24:38 +01001952
1953void CodeGeneratorARMVIXL::Finalize(CodeAllocator* allocator) {
Artem Serov09a940d2016-11-11 16:15:11 +00001954 FixJumpTables();
Vladimir Marko966b46f2018-08-03 10:20:19 +00001955
1956 // Emit JIT baker read barrier slow paths.
Vladimir Marko695348f2020-05-19 14:42:02 +01001957 DCHECK(GetCompilerOptions().IsJitCompiler() || jit_baker_read_barrier_slow_paths_.empty());
Vladimir Marko966b46f2018-08-03 10:20:19 +00001958 for (auto& entry : jit_baker_read_barrier_slow_paths_) {
1959 uint32_t encoded_data = entry.first;
1960 vixl::aarch32::Label* slow_path_entry = &entry.second.label;
1961 __ Bind(slow_path_entry);
Andreas Gampe3db70682018-12-26 15:12:03 -08001962 CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name= */ nullptr);
Vladimir Marko966b46f2018-08-03 10:20:19 +00001963 }
1964
Scott Wakelingfe885462016-09-22 10:24:38 +01001965 GetAssembler()->FinalizeCode();
1966 CodeGenerator::Finalize(allocator);
Vladimir Markoca1e0382018-04-11 09:58:41 +00001967
1968 // Verify Baker read barrier linker patches.
1969 if (kIsDebugBuild) {
1970 ArrayRef<const uint8_t> code = allocator->GetMemory();
1971 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
1972 DCHECK(info.label.IsBound());
1973 uint32_t literal_offset = info.label.GetLocation();
1974 DCHECK_ALIGNED(literal_offset, 2u);
1975
1976 auto GetInsn16 = [&code](uint32_t offset) {
1977 DCHECK_ALIGNED(offset, 2u);
1978 return (static_cast<uint32_t>(code[offset + 0]) << 0) +
1979 (static_cast<uint32_t>(code[offset + 1]) << 8);
1980 };
1981 auto GetInsn32 = [=](uint32_t offset) {
1982 return (GetInsn16(offset) << 16) + (GetInsn16(offset + 2u) << 0);
1983 };
1984
1985 uint32_t encoded_data = info.custom_data;
1986 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
1987 // Check that the next instruction matches the expected LDR.
1988 switch (kind) {
1989 case BakerReadBarrierKind::kField: {
1990 BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
1991 if (width == BakerReadBarrierWidth::kWide) {
1992 DCHECK_GE(code.size() - literal_offset, 8u);
1993 uint32_t next_insn = GetInsn32(literal_offset + 4u);
1994 // LDR (immediate), encoding T3, with correct base_reg.
1995 CheckValidReg((next_insn >> 12) & 0xfu); // Check destination register.
1996 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1997 CHECK_EQ(next_insn & 0xffff0000u, 0xf8d00000u | (base_reg << 16));
1998 } else {
1999 DCHECK_GE(code.size() - literal_offset, 6u);
2000 uint32_t next_insn = GetInsn16(literal_offset + 4u);
2001 // LDR (immediate), encoding T1, with correct base_reg.
2002 CheckValidReg(next_insn & 0x7u); // Check destination register.
2003 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
2004 CHECK_EQ(next_insn & 0xf838u, 0x6800u | (base_reg << 3));
2005 }
2006 break;
2007 }
2008 case BakerReadBarrierKind::kArray: {
2009 DCHECK_GE(code.size() - literal_offset, 8u);
2010 uint32_t next_insn = GetInsn32(literal_offset + 4u);
2011        // LDR (register), encoding T2, with correct base_reg and imm2=2 (index register LSL #2).
2012 CheckValidReg((next_insn >> 12) & 0xfu); // Check destination register.
2013 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
2014 CHECK_EQ(next_insn & 0xffff0ff0u, 0xf8500020u | (base_reg << 16));
2015 CheckValidReg(next_insn & 0xf); // Check index register
2016 break;
2017 }
2018 case BakerReadBarrierKind::kGcRoot: {
2019 BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
2020 if (width == BakerReadBarrierWidth::kWide) {
2021 DCHECK_GE(literal_offset, 4u);
2022 uint32_t prev_insn = GetInsn32(literal_offset - 4u);
Vladimir Markof28be432018-08-14 12:20:51 +00002023 // LDR (immediate), encoding T3, with correct root_reg.
Vladimir Markoca1e0382018-04-11 09:58:41 +00002024 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
Vladimir Markof28be432018-08-14 12:20:51 +00002025 CHECK_EQ(prev_insn & 0xfff0f000u, 0xf8d00000u | (root_reg << 12));
Vladimir Markoca1e0382018-04-11 09:58:41 +00002026 } else {
2027 DCHECK_GE(literal_offset, 2u);
2028 uint32_t prev_insn = GetInsn16(literal_offset - 2u);
Vladimir Markoca1e0382018-04-11 09:58:41 +00002029 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
Vladimir Marko3d350a82020-11-18 14:14:27 +00002030 // Usually LDR (immediate), encoding T1, with correct root_reg but we may have
2031 // a `MOV marked, old_value` for intrinsic CAS where `marked` is a low register.
2032 if ((prev_insn & 0xff87u) != (0x4600 | root_reg)) {
2033 CHECK_EQ(prev_insn & 0xf807u, 0x6800u | root_reg);
2034 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00002035 }
2036 break;
2037 }
Vladimir Marko3d350a82020-11-18 14:14:27 +00002038 case BakerReadBarrierKind::kIntrinsicCas: {
Vladimir Markod887ed82018-08-14 13:52:12 +00002039 DCHECK_GE(literal_offset, 4u);
2040 uint32_t prev_insn = GetInsn32(literal_offset - 4u);
Vladimir Marko3d350a82020-11-18 14:14:27 +00002041 // MOV (register), encoding T3, with correct root_reg.
Vladimir Markod887ed82018-08-14 13:52:12 +00002042 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
Vladimir Marko3d350a82020-11-18 14:14:27 +00002043 DCHECK_GE(root_reg, 8u); // Used only for high registers.
2044 CHECK_EQ(prev_insn & 0xfffffff0u, 0xea4f0000u | (root_reg << 8));
Vladimir Markod887ed82018-08-14 13:52:12 +00002045 break;
2046 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00002047 default:
2048 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
2049 UNREACHABLE();
2050 }
2051 }
2052 }
Scott Wakelingfe885462016-09-22 10:24:38 +01002053}
2054
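// Example of one of the encoding checks above (illustrative): for a wide
// kField load the expected instruction is LDR (immediate), encoding T3,
//
//   1111 1000 1101 | Rn(4) | Rt(4) | imm12
//
// so masking with 0xffff0000u isolates the opcode and Rn, and comparing
// against `0xf8d00000u | (base_reg << 16)` verifies both the encoding and
// that the load uses the expected base register.
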
2055void CodeGeneratorARMVIXL::SetupBlockedRegisters() const {
Scott Wakelingfe885462016-09-22 10:24:38 +01002056 // Stack register, LR and PC are always reserved.
2057 blocked_core_registers_[SP] = true;
2058 blocked_core_registers_[LR] = true;
2059 blocked_core_registers_[PC] = true;
2060
Roland Levillain6d729a72017-06-30 18:34:01 +01002061 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2062 // Reserve marking register.
2063 blocked_core_registers_[MR] = true;
2064 }
2065
Scott Wakelingfe885462016-09-22 10:24:38 +01002066 // Reserve thread register.
2067 blocked_core_registers_[TR] = true;
2068
2069 // Reserve temp register.
2070 blocked_core_registers_[IP] = true;
2071
2072 if (GetGraph()->IsDebuggable()) {
2073 // Stubs do not save callee-save floating point registers. If the graph
2074 // is debuggable, we need to deal with these registers differently. For
2075 // now, just block them.
2076 for (uint32_t i = kFpuCalleeSaves.GetFirstSRegister().GetCode();
2077 i <= kFpuCalleeSaves.GetLastSRegister().GetCode();
2078 ++i) {
2079 blocked_fpu_registers_[i] = true;
2080 }
2081 }
Scott Wakelingfe885462016-09-22 10:24:38 +01002082}
2083
Scott Wakelingfe885462016-09-22 10:24:38 +01002084InstructionCodeGeneratorARMVIXL::InstructionCodeGeneratorARMVIXL(HGraph* graph,
2085 CodeGeneratorARMVIXL* codegen)
2086 : InstructionCodeGenerator(graph, codegen),
2087 assembler_(codegen->GetAssembler()),
2088 codegen_(codegen) {}
2089
2090void CodeGeneratorARMVIXL::ComputeSpillMask() {
2091 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
Vladimir Marko460f0542019-07-04 14:02:08 +01002092 DCHECK_NE(core_spill_mask_ & (1u << kLrCode), 0u)
2093 << "At least the return address register must be saved";
2094 // 16-bit PUSH/POP (T1) can save/restore just the LR/PC.
2095 DCHECK(GetVIXLAssembler()->IsUsingT32());
Scott Wakelingfe885462016-09-22 10:24:38 +01002096 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
2097 // We use vpush and vpop for saving and restoring floating point registers, which take
2098 // a SRegister and the number of registers to save/restore after that SRegister. We
2099 // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
2100 // but in the range.
2101 if (fpu_spill_mask_ != 0) {
2102 uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
2103 uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
2104    for (uint32_t i = least_significant_bit + 1; i < most_significant_bit; ++i) {
2105 fpu_spill_mask_ |= (1 << i);
2106 }
2107 }
2108}
2109
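// Worked example for the fill loop above (illustrative): if the allocator
// spills only S16 and S19, fpu_spill_mask_ starts as 0b1001 << 16 and is
// widened to 0b1111 << 16 (S16-S19), so a single VPUSH {S16-S19} /
// VPOP {S16-S19} pair suffices. The two extra slots hold garbage but keep
// the save/restore code to one instruction each.
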
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00002110void CodeGeneratorARMVIXL::MaybeIncrementHotness(bool is_frame_entry) {
2111 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
2112 UseScratchRegisterScope temps(GetVIXLAssembler());
2113 vixl32::Register temp = temps.Acquire();
2114 static_assert(ArtMethod::MaxCounter() == 0xFFFF, "asm is probably wrong");
2115 if (!is_frame_entry) {
2116 __ Push(vixl32::Register(kMethodRegister));
Vladimir Markodec78172020-06-19 15:31:23 +01002117 GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize);
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00002118 GetAssembler()->LoadFromOffset(kLoadWord, kMethodRegister, sp, kArmWordSize);
2119 }
2120 // Load with zero extend to clear the high bits for integer overflow check.
2121 __ Ldrh(temp, MemOperand(kMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
2122 __ Add(temp, temp, 1);
2123 // Subtract one if the counter would overflow.
2124 __ Sub(temp, temp, Operand(temp, ShiftType::LSR, 16));
2125 __ Strh(temp, MemOperand(kMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
2126 if (!is_frame_entry) {
2127 __ Pop(vixl32::Register(kMethodRegister));
Vladimir Markodec78172020-06-19 15:31:23 +01002128 GetAssembler()->cfi().AdjustCFAOffset(-static_cast<int>(kArmWordSize));
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00002129 }
2130 }
2131
2132 if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
Nicolas Geoffray095dc462020-08-17 16:40:28 +01002133 ScopedProfilingInfoUse spiu(
2134 Runtime::Current()->GetJit(), GetGraph()->GetArtMethod(), Thread::Current());
2135 ProfilingInfo* info = spiu.GetProfilingInfo();
Nicolas Geoffray796aa2c2019-12-17 10:20:05 +00002136 if (info != nullptr) {
2137 uint32_t address = reinterpret_cast32<uint32_t>(info);
2138 vixl::aarch32::Label done;
2139 UseScratchRegisterScope temps(GetVIXLAssembler());
2140 temps.Exclude(ip);
2141 if (!is_frame_entry) {
2142 __ Push(r4); // Will be used as temporary. For frame entry, r4 is always available.
Vladimir Markodec78172020-06-19 15:31:23 +01002143 GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize);
Nicolas Geoffray796aa2c2019-12-17 10:20:05 +00002144 }
2145 __ Mov(r4, address);
2146 __ Ldrh(ip, MemOperand(r4, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
2147 __ Add(ip, ip, 1);
Nicolas Geoffray8b8d93d2020-09-17 14:30:01 +01002148 instruction_visitor_.GenerateAndConst(ip, ip, interpreter::kTieredHotnessMask);
Nicolas Geoffray796aa2c2019-12-17 10:20:05 +00002149 __ Strh(ip, MemOperand(r4, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
2150 if (!is_frame_entry) {
2151 __ Pop(r4);
Vladimir Markodec78172020-06-19 15:31:23 +01002152 GetAssembler()->cfi().AdjustCFAOffset(-static_cast<int>(kArmWordSize));
Nicolas Geoffray796aa2c2019-12-17 10:20:05 +00002153 }
2154 __ Lsls(ip, ip, 16);
2155 __ B(ne, &done);
2156 uint32_t entry_point_offset =
2157 GetThreadOffset<kArmPointerSize>(kQuickCompileOptimized).Int32Value();
2158 if (HasEmptyFrame()) {
2159 CHECK(is_frame_entry);
2160 // For leaf methods, we need to spill lr and r0. Also spill r1 and r2 for
2161 // alignment.
2162 uint32_t core_spill_mask =
2163 (1 << lr.GetCode()) | (1 << r0.GetCode()) | (1 << r1.GetCode()) | (1 << r2.GetCode());
2164 __ Push(RegisterList(core_spill_mask));
Vladimir Markodec78172020-06-19 15:31:23 +01002165 GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(core_spill_mask));
Nicolas Geoffray796aa2c2019-12-17 10:20:05 +00002166 __ Ldr(lr, MemOperand(tr, entry_point_offset));
2167 __ Blx(lr);
2168 __ Pop(RegisterList(core_spill_mask));
Vladimir Markodec78172020-06-19 15:31:23 +01002169 GetAssembler()->cfi().AdjustCFAOffset(
2170 -static_cast<int>(kArmWordSize) * POPCOUNT(core_spill_mask));
Nicolas Geoffray796aa2c2019-12-17 10:20:05 +00002171 } else {
2172 if (!RequiresCurrentMethod()) {
2173 CHECK(is_frame_entry);
2174 GetAssembler()->StoreToOffset(kStoreWord, kMethodRegister, sp, 0);
2175 }
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00002176 __ Ldr(lr, MemOperand(tr, entry_point_offset));
2177 __ Blx(lr);
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00002178 }
Nicolas Geoffray796aa2c2019-12-17 10:20:05 +00002179 __ Bind(&done);
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00002180 }
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00002181 }
2182}
2183
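// The saturation trick above, worked through (illustrative):
//
//   temp = 0x0041  ->  +1 = 0x0042   ->  0x0042 >> 16 = 0  ->  store 0x0042
//   temp = 0xFFFF  ->  +1 = 0x10000  ->  0x10000 >> 16 = 1  ->  store 0xFFFF
//
// i.e. `temp - (temp >> 16)` subtracts one exactly when the zero-extended
// 16-bit counter would wrap, so the STRH stores a value saturated at
// ArtMethod::MaxCounter().
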
Scott Wakelingfe885462016-09-22 10:24:38 +01002184void CodeGeneratorARMVIXL::GenerateFrameEntry() {
2185 bool skip_overflow_check =
2186 IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
2187 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
2188 __ Bind(&frame_entry_label_);
2189
2190 if (HasEmptyFrame()) {
David Srbecky30021842019-02-13 14:19:36 +00002191 // Ensure that the CFI opcode list is not empty.
2192 GetAssembler()->cfi().Nop();
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00002193 MaybeIncrementHotness(/* is_frame_entry= */ true);
Scott Wakelingfe885462016-09-22 10:24:38 +01002194 return;
2195 }
2196
Scott Wakelingfe885462016-09-22 10:24:38 +01002197 if (!skip_overflow_check) {
xueliang.zhong10049552018-01-31 17:10:36 +00002198 // Using r4 instead of IP saves 2 bytes.
Nicolas Geoffray1a4f3ca2018-01-25 14:07:15 +00002199 UseScratchRegisterScope temps(GetVIXLAssembler());
xueliang.zhong10049552018-01-31 17:10:36 +00002200 vixl32::Register temp;
2201 // TODO: Remove this check when R4 is made a callee-save register
2202 // in ART compiled code (b/72801708). Currently we need to make
2203 // sure r4 is not blocked, e.g. in special purpose
2204 // TestCodeGeneratorARMVIXL; also asserting that r4 is available
2205 // here.
2206 if (!blocked_core_registers_[R4]) {
2207 for (vixl32::Register reg : kParameterCoreRegistersVIXL) {
2208 DCHECK(!reg.Is(r4));
2209 }
2210 DCHECK(!kCoreCalleeSaves.Includes(r4));
2211 temp = r4;
2212 } else {
2213 temp = temps.Acquire();
2214 }
Vladimir Marko33bff252017-11-01 14:35:42 +00002215 __ Sub(temp, sp, Operand::From(GetStackOverflowReservedBytes(InstructionSet::kArm)));
Scott Wakelingfe885462016-09-22 10:24:38 +01002216 // The load must immediately precede RecordPcInfo.
Artem Serov0fb37192016-12-06 18:13:40 +00002217 ExactAssemblyScope aas(GetVIXLAssembler(),
2218 vixl32::kMaxInstructionSizeInBytes,
2219 CodeBufferCheckScope::kMaximumSize);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002220 __ ldr(temp, MemOperand(temp));
2221 RecordPcInfo(nullptr, 0);
Scott Wakelingfe885462016-09-22 10:24:38 +01002222 }
2223
Vladimir Marko1a225a72019-07-05 13:37:42 +01002224 uint32_t frame_size = GetFrameSize();
2225 uint32_t core_spills_offset = frame_size - GetCoreSpillSize();
2226 uint32_t fp_spills_offset = frame_size - FrameEntrySpillSize();
2227 if ((fpu_spill_mask_ == 0u || IsPowerOfTwo(fpu_spill_mask_)) &&
2228 core_spills_offset <= 3u * kArmWordSize) {
2229 // Do a single PUSH for core registers including the method and up to two
2230 // filler registers. Then store the single FP spill if any.
2231 // (The worst case is when the method is not required and we actually
2232 // store 3 extra registers but they are stored in the same properly
2233 // aligned 16-byte chunk where we're already writing anyway.)
2234 DCHECK_EQ(kMethodRegister.GetCode(), 0u);
2235 uint32_t extra_regs = MaxInt<uint32_t>(core_spills_offset / kArmWordSize);
2236 DCHECK_LT(MostSignificantBit(extra_regs), LeastSignificantBit(core_spill_mask_));
2237 __ Push(RegisterList(core_spill_mask_ | extra_regs));
2238 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
2239 GetAssembler()->cfi().RelOffsetForMany(DWARFReg(kMethodRegister),
2240 core_spills_offset,
2241 core_spill_mask_,
2242 kArmWordSize);
2243 if (fpu_spill_mask_ != 0u) {
2244 DCHECK(IsPowerOfTwo(fpu_spill_mask_));
2245 vixl::aarch32::SRegister sreg(LeastSignificantBit(fpu_spill_mask_));
2246 GetAssembler()->StoreSToOffset(sreg, sp, fp_spills_offset);
2247 GetAssembler()->cfi().RelOffset(DWARFReg(sreg), /*offset=*/ fp_spills_offset);
2248 }
2249 } else {
2250 __ Push(RegisterList(core_spill_mask_));
2251 GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(core_spill_mask_));
2252 GetAssembler()->cfi().RelOffsetForMany(DWARFReg(kMethodRegister),
2253 /*offset=*/ 0,
2254 core_spill_mask_,
2255 kArmWordSize);
2256 if (fpu_spill_mask_ != 0) {
2257 uint32_t first = LeastSignificantBit(fpu_spill_mask_);
Scott Wakelingfe885462016-09-22 10:24:38 +01002258
Vladimir Marko1a225a72019-07-05 13:37:42 +01002259 // Check that list is contiguous.
2260 DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));
Scott Wakelingfe885462016-09-22 10:24:38 +01002261
Vladimir Marko1a225a72019-07-05 13:37:42 +01002262 __ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
2263 GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
2264 GetAssembler()->cfi().RelOffsetForMany(DWARFReg(s0),
2265 /*offset=*/ 0,
2266 fpu_spill_mask_,
2267 kArmWordSize);
2268 }
Scott Wakelingbffdc702016-12-07 17:46:03 +00002269
Vladimir Marko1a225a72019-07-05 13:37:42 +01002270 // Adjust SP and save the current method if we need it. Note that we do
2271 // not save the method in HCurrentMethod, as the instruction might have
2272 // been removed in the SSA graph.
2273 if (RequiresCurrentMethod() && fp_spills_offset <= 3 * kArmWordSize) {
2274 DCHECK_EQ(kMethodRegister.GetCode(), 0u);
2275 __ Push(RegisterList(MaxInt<uint32_t>(fp_spills_offset / kArmWordSize)));
2276 GetAssembler()->cfi().AdjustCFAOffset(fp_spills_offset);
2277 } else {
Vladimir Markodec78172020-06-19 15:31:23 +01002278 IncreaseFrame(fp_spills_offset);
Vladimir Marko1a225a72019-07-05 13:37:42 +01002279 if (RequiresCurrentMethod()) {
2280 GetAssembler()->StoreToOffset(kStoreWord, kMethodRegister, sp, 0);
2281 }
2282 }
Scott Wakelingbffdc702016-12-07 17:46:03 +00002283 }
Nicolas Geoffrayf7893532017-06-15 12:34:36 +01002284
2285 if (GetGraph()->HasShouldDeoptimizeFlag()) {
2286 UseScratchRegisterScope temps(GetVIXLAssembler());
2287 vixl32::Register temp = temps.Acquire();
2288 // Initialize should_deoptimize flag to 0.
2289 __ Mov(temp, 0);
2290 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, GetStackOffsetOfShouldDeoptimizeFlag());
2291 }
Roland Levillain5daa4952017-07-03 17:23:56 +01002292
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +00002293 MaybeIncrementHotness(/* is_frame_entry= */ true);
Andreas Gampe3db70682018-12-26 15:12:03 -08002294 MaybeGenerateMarkingRegisterCheck(/* code= */ 1);
Scott Wakelingfe885462016-09-22 10:24:38 +01002295}
2296
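// Worked example of the single-PUSH fast path above (illustrative): with
// core_spill_mask_ = {r5, lr} (8 bytes) and a 16-byte frame,
// core_spills_offset is 8, extra_regs becomes {r0, r1}, and the prologue
// collapses to
//
//   PUSH {r0, r1, r5, lr}
//
// which allocates the whole frame and stores the ArtMethod* (r0) at SP + 0
// in one instruction; r1 merely fills the remaining 4-byte slot.
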
2297void CodeGeneratorARMVIXL::GenerateFrameExit() {
2298 if (HasEmptyFrame()) {
2299 __ Bx(lr);
2300 return;
2301 }
Scott Wakelingfe885462016-09-22 10:24:38 +01002302
Scott Wakelingfe885462016-09-22 10:24:38 +01002303 // Pop LR into PC to return.
2304 DCHECK_NE(core_spill_mask_ & (1 << kLrCode), 0U);
2305 uint32_t pop_mask = (core_spill_mask_ & (~(1 << kLrCode))) | 1 << kPcCode;
Vladimir Marko1a225a72019-07-05 13:37:42 +01002306
2307 uint32_t frame_size = GetFrameSize();
2308 uint32_t core_spills_offset = frame_size - GetCoreSpillSize();
2309 uint32_t fp_spills_offset = frame_size - FrameEntrySpillSize();
2310 if ((fpu_spill_mask_ == 0u || IsPowerOfTwo(fpu_spill_mask_)) &&
2311 // r4 is blocked by TestCodeGeneratorARMVIXL used by some tests.
2312 core_spills_offset <= (blocked_core_registers_[r4.GetCode()] ? 2u : 3u) * kArmWordSize) {
2313 // Load the FP spill if any and then do a single POP including the method
2314 // and up to two filler registers. If we have no FP spills, this also has
2315 // the advantage that we do not need to emit CFI directives.
2316 if (fpu_spill_mask_ != 0u) {
2317 DCHECK(IsPowerOfTwo(fpu_spill_mask_));
2318 vixl::aarch32::SRegister sreg(LeastSignificantBit(fpu_spill_mask_));
2319 GetAssembler()->cfi().RememberState();
2320 GetAssembler()->LoadSFromOffset(sreg, sp, fp_spills_offset);
2321 GetAssembler()->cfi().Restore(DWARFReg(sreg));
2322 }
2323 // Clobber registers r2-r4 as they are caller-save in ART managed ABI and
2324 // never hold the return value.
2325 uint32_t extra_regs = MaxInt<uint32_t>(core_spills_offset / kArmWordSize) << r2.GetCode();
2326 DCHECK_EQ(extra_regs & kCoreCalleeSaves.GetList(), 0u);
2327 DCHECK_LT(MostSignificantBit(extra_regs), LeastSignificantBit(pop_mask));
2328 __ Pop(RegisterList(pop_mask | extra_regs));
2329 if (fpu_spill_mask_ != 0u) {
2330 GetAssembler()->cfi().RestoreState();
2331 }
2332 } else {
2333 GetAssembler()->cfi().RememberState();
Vladimir Markodec78172020-06-19 15:31:23 +01002334 DecreaseFrame(fp_spills_offset);
Vladimir Marko1a225a72019-07-05 13:37:42 +01002335 if (fpu_spill_mask_ != 0) {
2336 uint32_t first = LeastSignificantBit(fpu_spill_mask_);
2337
2338 // Check that list is contiguous.
2339 DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));
2340
2341 __ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
2342 GetAssembler()->cfi().AdjustCFAOffset(
2343 -static_cast<int>(kArmWordSize) * POPCOUNT(fpu_spill_mask_));
2344 GetAssembler()->cfi().RestoreMany(DWARFReg(vixl32::SRegister(0)), fpu_spill_mask_);
2345 }
2346 __ Pop(RegisterList(pop_mask));
2347 GetAssembler()->cfi().RestoreState();
2348 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
2349 }
Scott Wakelingfe885462016-09-22 10:24:38 +01002350}
2351
2352void CodeGeneratorARMVIXL::Bind(HBasicBlock* block) {
2353 __ Bind(GetLabelOf(block));
2354}
2355
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002356Location InvokeDexCallingConventionVisitorARMVIXL::GetNextLocation(DataType::Type type) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002357 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002358 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002359 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002360 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002361 case DataType::Type::kInt8:
2362 case DataType::Type::kUint16:
2363 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002364 case DataType::Type::kInt32: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002365 uint32_t index = gp_index_++;
2366 uint32_t stack_index = stack_index_++;
2367 if (index < calling_convention.GetNumberOfRegisters()) {
2368 return LocationFrom(calling_convention.GetRegisterAt(index));
2369 } else {
2370 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
2371 }
2372 }
2373
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002374 case DataType::Type::kInt64: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002375 uint32_t index = gp_index_;
2376 uint32_t stack_index = stack_index_;
2377 gp_index_ += 2;
2378 stack_index_ += 2;
2379 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
2380 if (calling_convention.GetRegisterAt(index).Is(r1)) {
2381 // Skip R1, and use R2_R3 instead.
2382 gp_index_++;
2383 index++;
2384 }
2385 }
2386 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
2387 DCHECK_EQ(calling_convention.GetRegisterAt(index).GetCode() + 1,
2388 calling_convention.GetRegisterAt(index + 1).GetCode());
2389
2390 return LocationFrom(calling_convention.GetRegisterAt(index),
2391 calling_convention.GetRegisterAt(index + 1));
2392 } else {
2393 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
2394 }
2395 }
2396
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002397 case DataType::Type::kFloat32: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002398 uint32_t stack_index = stack_index_++;
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case DataType::Type::kFloat64: {
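      // A double occupies an aligned pair of S registers, so advance the
      // double cursor to an even index past any singles already handed out.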
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = LocationFrom(
            calling_convention.GetFpuRegisterAt(index),
            calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      UNREACHABLE();
  }
  return Location::NoLocation();
}

Location InvokeDexCallingConventionVisitorARMVIXL::GetReturnLocation(DataType::Type type) const {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kUint32:
    case DataType::Type::kInt32: {
      return LocationFrom(r0);
    }

    case DataType::Type::kFloat32: {
      return LocationFrom(s0);
    }

    case DataType::Type::kUint64:
    case DataType::Type::kInt64: {
      return LocationFrom(r0, r1);
    }

    case DataType::Type::kFloat64: {
      return LocationFrom(s0, s1);
    }

    case DataType::Type::kVoid:
      return Location::NoLocation();
  }

  UNREACHABLE();
}

Location InvokeDexCallingConventionVisitorARMVIXL::GetMethodLocation() const {
  return LocationFrom(kMethodRegister);
}

Location CriticalNativeCallingConventionVisitorARMVIXL::GetNextLocation(DataType::Type type) {
  DCHECK_NE(type, DataType::Type::kReference);

  // Native ABI uses the same registers as managed, except that the method register r0
  // is a normal argument.
  Location location = Location::NoLocation();
  if (DataType::Is64BitType(type)) {
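    // As in the AAPCS, a 64-bit value takes an even/odd core register pair
    // (r0/r1 or r2/r3) and an 8-byte aligned stack slot.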
    gpr_index_ = RoundUp(gpr_index_, 2u);
    stack_offset_ = RoundUp(stack_offset_, 2 * kFramePointerSize);
    if (gpr_index_ < 1u + kParameterCoreRegistersLengthVIXL) {
      location = LocationFrom(gpr_index_ == 0u ? r0 : kParameterCoreRegistersVIXL[gpr_index_ - 1u],
                              kParameterCoreRegistersVIXL[gpr_index_]);
      gpr_index_ += 2u;
    }
  } else {
    if (gpr_index_ < 1u + kParameterCoreRegistersLengthVIXL) {
      location = LocationFrom(gpr_index_ == 0u ? r0 : kParameterCoreRegistersVIXL[gpr_index_ - 1u]);
      ++gpr_index_;
    }
  }
  if (location.IsInvalid()) {
    if (DataType::Is64BitType(type)) {
      location = Location::DoubleStackSlot(stack_offset_);
      stack_offset_ += 2 * kFramePointerSize;
    } else {
      location = Location::StackSlot(stack_offset_);
      stack_offset_ += kFramePointerSize;
    }

    if (for_register_allocation_) {
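      // The register allocator does not support fixed stack-slot constraints
      // for inputs, so relax the location to Any(); the value is moved into
      // its actual slot when the call itself is emitted.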
      location = Location::Any();
    }
  }
  return location;
}

Location CriticalNativeCallingConventionVisitorARMVIXL::GetReturnLocation(DataType::Type type)
    const {
  // We perform conversion to the managed ABI return register after the call if needed.
  InvokeDexCallingConventionVisitorARMVIXL dex_calling_convention;
  return dex_calling_convention.GetReturnLocation(type);
}

Location CriticalNativeCallingConventionVisitorARMVIXL::GetMethodLocation() const {
  // Pass the method in the hidden argument R4.
  return Location::RegisterLocation(R4);
}

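// Moves a 32-bit value between any combination of core register, S register
// and stack slot; a stack-to-stack move goes through a scratch core register.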
void CodeGeneratorARMVIXL::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(RegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadFromOffset(kLoadWord,
                                     RegisterFrom(destination),
                                     sp,
                                     source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      GetAssembler()->StoreToOffset(kStoreWord,
                                    RegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      vixl32::Register temp = temps.Acquire();
      GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
      GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
    }
  }
}

void CodeGeneratorARMVIXL::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location), value);
}

void CodeGeneratorARMVIXL::MoveLocation(Location dst, Location src, DataType::Type dst_type) {
  // TODO(VIXL): Maybe refactor to have the 'move' implementation here and use it in
  // `ParallelMoveResolverARMVIXL::EmitMove`, as is done in the `arm64` backend.
  HParallelMove move(GetGraph()->GetAllocator());
  move.AddMove(src, dst, dst_type, nullptr);
  GetMoveResolver()->EmitNativeCode(&move);
}

void CodeGeneratorARMVIXL::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else if (location.IsRegisterPair()) {
    locations->AddTemp(LocationFrom(LowRegisterFrom(location)));
    locations->AddTemp(LocationFrom(HighRegisterFrom(location)));
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                         HInstruction* instruction,
                                         uint32_t dex_pc,
                                         SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);

  ThreadOffset32 entrypoint_offset = GetThreadOffset<kArmPointerSize>(entrypoint);
  // Reduce code size for AOT by using shared trampolines for slow path runtime calls across the
  // entire oat file. This adds an extra branch and we do not want to slow down the main path.
  // For JIT, thunk sharing is per-method, so the gains would be smaller or even negative.
  if (slow_path == nullptr || GetCompilerOptions().IsJitCompiler()) {
    __ Ldr(lr, MemOperand(tr, entrypoint_offset.Int32Value()));
    // Ensure the pc position is recorded immediately after the `blx` instruction.
    // blx in T32 has only a 16-bit encoding, which is why a stricter check for the scope is used.
    ExactAssemblyScope aas(GetVIXLAssembler(),
                           vixl32::k16BitT32InstructionSizeInBytes,
                           CodeBufferCheckScope::kExactSize);
    __ blx(lr);
    if (EntrypointRequiresStackMap(entrypoint)) {
      RecordPcInfo(instruction, dex_pc, slow_path);
    }
  } else {
    // Ensure the pc position is recorded immediately after the `bl` instruction.
    ExactAssemblyScope aas(GetVIXLAssembler(),
                           vixl32::k32BitT32InstructionSizeInBytes,
                           CodeBufferCheckScope::kExactSize);
    EmitEntrypointThunkCall(entrypoint_offset);
    if (EntrypointRequiresStackMap(entrypoint)) {
      RecordPcInfo(instruction, dex_pc, slow_path);
    }
  }
}

void CodeGeneratorARMVIXL::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                               HInstruction* instruction,
                                                               SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blx(lr);
}

void InstructionCodeGeneratorARMVIXL::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

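  // A back edge bumps the method hotness counter and folds the jump into the
  // loop's suspend check instead of emitting a separate branch.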
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->MaybeIncrementHotness(/* is_frame_entry= */ false);
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
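  // When leaving the entry block, emit the method-entry suspend check before
  // the branch (or fall-through) to the first block.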
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 2);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARMVIXL::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void LocationsBuilderARMVIXL::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void InstructionCodeGeneratorARMVIXL::GenerateCompareTestAndBranch(HCondition* condition,
                                                                   vixl32::Label* true_target,
                                                                   vixl32::Label* false_target,
                                                                   bool is_far_target) {
  if (true_target == false_target) {
    DCHECK(true_target != nullptr);
    __ B(true_target);
    return;
  }

  vixl32::Label* non_fallthrough_target;
  bool invert;
  bool emit_both_branches;

  if (true_target == nullptr) {
    // The true target is fallthrough.
    DCHECK(false_target != nullptr);
    non_fallthrough_target = false_target;
    invert = true;
    emit_both_branches = false;
  } else {
    non_fallthrough_target = true_target;
    invert = false;
    // Either the false target is fallthrough, or there is no fallthrough
    // and both branches must be emitted.
    emit_both_branches = (false_target != nullptr);
  }

  const auto cond = GenerateTest(condition, invert, codegen_);

  __ B(cond.first, non_fallthrough_target, is_far_target);

  if (emit_both_branches) {
    // No target falls through, we need to branch.
    __ B(false_target);
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateTestAndBranch(HInstruction* instruction,
                                                            size_t condition_input_index,
                                                            vixl32::Label* true_target,
                                                            vixl32::Label* false_target,
                                                            bool far_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << Int32ConstantFrom(cond);
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //   (1) true_target == nullptr && false_target != nullptr
  //         - opposite condition true => branch to false_target
  //   (2) true_target != nullptr && false_target == nullptr
  //         - condition true => branch to true_target
  //   (3) true_target != nullptr && false_target != nullptr
  //         - condition true => branch to true_target
  //         - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Condition has been materialized, compare the output to 0.
    if (kIsDebugBuild) {
      Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
      DCHECK(cond_val.IsRegister());
    }
    if (true_target == nullptr) {
      __ CompareAndBranchIfZero(InputRegisterAt(instruction, condition_input_index),
                                false_target,
                                far_target);
    } else {
      __ CompareAndBranchIfNonZero(InputRegisterAt(instruction, condition_input_index),
                                   true_target,
                                   far_target);
    }
  } else {
    // Condition has not been materialized. Use its inputs as the comparison and
    // its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target, far_target);
      return;
    }

    vixl32::Label* non_fallthrough_target;
    vixl32::Condition arm_cond = vixl32::Condition::None();
    const vixl32::Register left = InputRegisterAt(cond, 0);
    const Operand right = InputOperandAt(cond, 1);

    if (true_target == nullptr) {
      arm_cond = ARMCondition(condition->GetOppositeCondition());
      non_fallthrough_target = false_target;
    } else {
      arm_cond = ARMCondition(condition->GetCondition());
      non_fallthrough_target = true_target;
    }

    if (right.IsImmediate() && right.GetImmediate() == 0 && (arm_cond.Is(ne) || arm_cond.Is(eq))) {
      if (arm_cond.Is(eq)) {
        __ CompareAndBranchIfZero(left, non_fallthrough_target, far_target);
      } else {
        DCHECK(arm_cond.Is(ne));
        __ CompareAndBranchIfNonZero(left, non_fallthrough_target, far_target);
      }
    } else {
      __ Cmp(left, right);
      __ B(arm_cond, non_fallthrough_target, far_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}

void LocationsBuilderARMVIXL::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl32::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  vixl32::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
}

void LocationsBuilderARMVIXL::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARMVIXL* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARMVIXL>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index= */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target= */ nullptr);
}

void LocationsBuilderARMVIXL::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARMVIXL::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  GetAssembler()->LoadFromOffset(kLoadWord,
                                 OutputRegister(flag),
                                 sp,
                                 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
}

void LocationsBuilderARMVIXL::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
  const bool is_floating_point = DataType::IsFloatingPointType(select->GetType());

  if (is_floating_point) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::FpuRegisterOrConstant(select->GetTrueValue()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Arm8BitEncodableConstantOrRegister(select->GetTrueValue()));
  }

  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RegisterOrConstant(select->GetCondition()));
    // The code generator handles overlap with the values, but not with the condition.
    locations->SetOut(Location::SameAsFirstInput());
  } else if (is_floating_point) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    if (!locations->InAt(1).IsConstant()) {
      locations->SetInAt(0, Arm8BitEncodableConstantOrRegister(select->GetFalseValue()));
    }

    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARMVIXL::VisitSelect(HSelect* select) {
  HInstruction* const condition = select->GetCondition();
  const LocationSummary* const locations = select->GetLocations();
  const DataType::Type type = select->GetType();
  const Location first = locations->InAt(0);
  const Location out = locations->Out();
  const Location second = locations->InAt(1);

  // In the unlucky case where the output of this instruction overlaps with an
  // input of an "emitted-at-use-site" condition, and the output is not one of
  // this instruction's inputs, we need to fall back to branches instead of
  // conditional ARM instructions.
  bool output_overlaps_with_condition_inputs =
      !IsBooleanValueOrMaterializedCondition(condition) &&
      !out.Equals(first) &&
      !out.Equals(second) &&
      (condition->GetLocations()->InAt(0).Equals(out) ||
       condition->GetLocations()->InAt(1).Equals(out));
  DCHECK(!output_overlaps_with_condition_inputs || condition->IsCondition());
  Location src;

  if (condition->IsIntConstant()) {
    if (condition->AsIntConstant()->IsFalse()) {
      src = first;
    } else {
      src = second;
    }

    codegen_->MoveLocation(out, src, type);
    return;
  }

  if (!DataType::IsFloatingPointType(type) && !output_overlaps_with_condition_inputs) {
    bool invert = false;

    if (out.Equals(second)) {
      src = first;
      invert = true;
    } else if (out.Equals(first)) {
      src = second;
    } else if (second.IsConstant()) {
      DCHECK(CanEncodeConstantAs8BitImmediate(second.GetConstant()));
      src = second;
    } else if (first.IsConstant()) {
      DCHECK(CanEncodeConstantAs8BitImmediate(first.GetConstant()));
      src = first;
      invert = true;
    } else {
      src = second;
    }

    if (CanGenerateConditionalMove(out, src)) {
      if (!out.Equals(first) && !out.Equals(second)) {
        codegen_->MoveLocation(out, src.Equals(first) ? second : first, type);
      }

      std::pair<vixl32::Condition, vixl32::Condition> cond(eq, ne);

      if (IsBooleanValueOrMaterializedCondition(condition)) {
        __ Cmp(InputRegisterAt(select, 2), 0);
        cond = invert ? std::make_pair(eq, ne) : std::make_pair(ne, eq);
      } else {
        cond = GenerateTest(condition->AsCondition(), invert, codegen_);
      }

      const size_t instr_count = out.IsRegisterPair() ? 4 : 2;
      // We use the scope because of the IT block that follows.
      ExactAssemblyScope guard(GetVIXLAssembler(),
                               instr_count * vixl32::k16BitT32InstructionSizeInBytes,
                               CodeBufferCheckScope::kExactSize);

      if (out.IsRegister()) {
        __ it(cond.first);
        __ mov(cond.first, RegisterFrom(out), OperandFrom(src, type));
      } else {
        DCHECK(out.IsRegisterPair());

        Operand operand_high(0);
        Operand operand_low(0);

        if (src.IsConstant()) {
          const int64_t value = Int64ConstantFrom(src);

          operand_high = High32Bits(value);
          operand_low = Low32Bits(value);
        } else {
          DCHECK(src.IsRegisterPair());
          operand_high = HighRegisterFrom(src);
          operand_low = LowRegisterFrom(src);
        }

        __ it(cond.first);
        __ mov(cond.first, LowRegisterFrom(out), operand_low);
        __ it(cond.first);
        __ mov(cond.first, HighRegisterFrom(out), operand_high);
      }

      return;
    }
  }

  vixl32::Label* false_target = nullptr;
  vixl32::Label* true_target = nullptr;
  vixl32::Label select_end;
  vixl32::Label other_case;
  vixl32::Label* const target = codegen_->GetFinalLabel(select, &select_end);

  if (out.Equals(second)) {
    true_target = target;
    src = first;
  } else {
    false_target = target;
    src = second;

    if (!out.Equals(first)) {
      if (output_overlaps_with_condition_inputs) {
        false_target = &other_case;
      } else {
        codegen_->MoveLocation(out, first, type);
      }
    }
  }

  GenerateTestAndBranch(select, 2, true_target, false_target, /* far_target= */ false);
  codegen_->MoveLocation(out, src, type);
  if (output_overlaps_with_condition_inputs) {
    __ B(target);
    __ Bind(&other_case);
    codegen_->MoveLocation(out, first, type);
  }

  if (select_end.IsReferenced()) {
    __ Bind(&select_end);
  }
}

void LocationsBuilderARMVIXL::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}

void InstructionCodeGeneratorARMVIXL::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

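// Adjust SP by `adjustment` bytes and keep the CFI description of the CFA
// offset in sync with the stack pointer change.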
void CodeGeneratorARMVIXL::IncreaseFrame(size_t adjustment) {
  __ Claim(adjustment);
  GetAssembler()->cfi().AdjustCFAOffset(adjustment);
}

void CodeGeneratorARMVIXL::DecreaseFrame(size_t adjustment) {
  __ Drop(adjustment);
  GetAssembler()->cfi().AdjustCFAOffset(-adjustment);
}

void CodeGeneratorARMVIXL::GenerateNop() {
  __ Nop();
}

// `temp` is an extra temporary register that is used for some conditions;
// callers may not specify it, in which case the method will use a scratch
// register instead.
void CodeGeneratorARMVIXL::GenerateConditionWithZero(IfCondition condition,
                                                     vixl32::Register out,
                                                     vixl32::Register in,
                                                     vixl32::Register temp) {
  switch (condition) {
    case kCondEQ:
    // x <= 0 iff x == 0 when the comparison is unsigned.
    case kCondBE:
      if (!temp.IsValid() || (out.IsLow() && !out.Is(in))) {
        temp = out;
      }

      // Avoid 32-bit instructions if possible; note that `in` and `temp` must be
      // different as well.
      if (in.IsLow() && temp.IsLow() && !in.Is(temp)) {
        // temp = - in; only 0 sets the carry flag.
        __ Rsbs(temp, in, 0);

        if (out.Is(in)) {
          std::swap(in, temp);
        }

        // out = - in + in + carry = carry
        __ Adc(out, temp, in);
      } else {
        // If `in` is 0, then it has 32 leading zeros, and less than that otherwise.
        __ Clz(out, in);
        // Any number less than 32 logically shifted right by 5 bits results in 0;
        // the same operation on 32 yields 1.
        __ Lsr(out, out, 5);
      }

      break;
    case kCondNE:
    // x > 0 iff x != 0 when the comparison is unsigned.
    case kCondA: {
      UseScratchRegisterScope temps(GetVIXLAssembler());

      if (out.Is(in)) {
        if (!temp.IsValid() || in.Is(temp)) {
          temp = temps.Acquire();
        }
      } else if (!temp.IsValid() || !temp.IsLow()) {
        temp = out;
      }

      // temp = in - 1; only 0 does not set the carry flag.
      __ Subs(temp, in, 1);
      // out = in + ~temp + carry = in + (-(in - 1) - 1) + carry = in - in + 1 - 1 + carry = carry
      __ Sbc(out, in, temp);
      break;
    }
    case kCondGE:
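      // in >= 0 iff the sign bit of ~in is set, so invert `in` and share the
      // kCondLT sign-bit test below.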
      __ Mvn(out, in);
      in = out;
      FALLTHROUGH_INTENDED;
    case kCondLT:
      // We only care about the sign bit.
      __ Lsr(out, in, 31);
      break;
    case kCondAE:
      // Trivially true.
      __ Mov(out, 1);
      break;
    case kCondB:
      // Trivially false.
      __ Mov(out, 0);
      break;
    default:
      LOG(FATAL) << "Unexpected condition " << condition;
      UNREACHABLE();
  }
}

void LocationsBuilderARMVIXL::HandleCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
  const DataType::Type type = cond->InputAt(0)->GetType();
  if (DataType::IsFloatingPointType(type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, ArithmeticZeroOrFpuRegister(cond->InputAt(1)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
  }
  if (!cond->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARMVIXL::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  const DataType::Type type = cond->GetLeft()->GetType();

  if (DataType::IsFloatingPointType(type)) {
    GenerateConditionGeneric(cond, codegen_);
    return;
  }

  DCHECK(DataType::IsIntegralType(type) || type == DataType::Type::kReference) << type;

  const IfCondition condition = cond->GetCondition();

  // A condition with a single boolean input, or with two boolean inputs that
  // are not an equality or inequality, results from transformations done by
  // the instruction simplifier and is handled as a regular condition with
  // integral inputs.
  if (type == DataType::Type::kBool &&
      cond->GetRight()->GetType() == DataType::Type::kBool &&
      (condition == kCondEQ || condition == kCondNE)) {
    vixl32::Register left = InputRegisterAt(cond, 0);
    const vixl32::Register out = OutputRegister(cond);
    const Location right_loc = cond->GetLocations()->InAt(1);

    // The constant case is handled by the instruction simplifier.
    DCHECK(!right_loc.IsConstant());

    vixl32::Register right = RegisterFrom(right_loc);

    // Avoid 32-bit instructions if possible.
    if (out.Is(right)) {
      std::swap(left, right);
    }

    __ Eor(out, left, right);

    if (condition == kCondEQ) {
      __ Eor(out, out, 1);
    }

    return;
  }

  GenerateConditionIntegralOrNonPrimitive(cond, codegen_);
}

void LocationsBuilderARMVIXL::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitFloatConstant(
    HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

void LocationsBuilderARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARMVIXL::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARMVIXL::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARMVIXL::VisitReturn(HReturn* ret) {
  if (GetGraph()->IsCompilingOsr()) {
    // To simplify callers of an OSR method, we put the return value in both
    // floating point and core registers.
    switch (ret->InputAt(0)->GetType()) {
      case DataType::Type::kFloat32:
        __ Vmov(r0, s0);
        break;
      case DataType::Type::kFloat64:
        __ Vmov(r0, r1, d0);
        break;
      default:
        break;
    }
  }
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARMVIXL::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARMVIXL::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 3);
}

void LocationsBuilderARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARMVIXL intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
    CriticalNativeCallingConventionVisitorARMVIXL calling_convention_visitor(
        /*for_register_allocation=*/ true);
    CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
  } else {
    HandleInvoke(invoke);
  }
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARMVIXL* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARMVIXL intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

void InstructionCodeGeneratorARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 4);
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());

  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 5);
}

void LocationsBuilderARMVIXL::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARMVIXL calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARMVIXL intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 6);
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());

  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 7);
}

void LocationsBuilderARMVIXL::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive) {
    // We cannot request r12 as it's blocked by the register allocator.
    invoke->GetLocations()->SetInAt(invoke->GetNumberOfArguments() - 1, Location::Any());
  }
}

void CodeGeneratorARMVIXL::MaybeGenerateInlineCacheCheck(HInstruction* instruction,
                                                         vixl32::Register klass) {
  DCHECK_EQ(r0.GetCode(), klass.GetCode());
  // We know the destination of an intrinsic, so no need to record inline
  // caches.
  if (!instruction->GetLocations()->Intrinsified() &&
      GetGraph()->IsCompilingBaseline() &&
      !Runtime::Current()->IsAotCompiler()) {
    DCHECK(!instruction->GetEnvironment()->IsFromInlinedInvoke());
    ScopedProfilingInfoUse spiu(
        Runtime::Current()->GetJit(), GetGraph()->GetArtMethod(), Thread::Current());
    ProfilingInfo* info = spiu.GetProfilingInfo();
    if (info != nullptr) {
      InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
      uint32_t address = reinterpret_cast32<uint32_t>(cache);
      vixl32::Label done;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      temps.Exclude(ip);
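      // kQuickUpdateInlineCache reads the checked class from r0 (asserted at
      // the top of this method) and the InlineCache pointer from r4.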
      __ Mov(r4, address);
      __ Ldr(ip, MemOperand(r4, InlineCache::ClassesOffset().Int32Value()));
      // Fast path for a monomorphic cache.
      __ Cmp(klass, ip);
      __ B(eq, &done, /* is_far_target= */ false);
      InvokeRuntime(kQuickUpdateInlineCache, instruction, instruction->GetDexPc());
      __ Bind(&done);
    }
  }
}

void InstructionCodeGeneratorARMVIXL::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  DCHECK(!receiver.IsStackSlot());

  // Ensure the pc position is recorded immediately after the `ldr` instruction.
  {
    ExactAssemblyScope aas(GetVIXLAssembler(),
                           vixl32::kMaxInstructionSizeInBytes,
                           CodeBufferCheckScope::kMaximumSize);
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ ldr(temp, MemOperand(RegisterFrom(receiver), class_offset));
    codegen_->MaybeRecordImplicitNullCheck(invoke);
  }
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp);

  // If we're compiling baseline, update the inline cache.
  codegen_->MaybeGenerateInlineCacheCheck(invoke, temp);

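  // temp = temp->imt_, i.e. the ImTable pointer of the receiver's class.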
  GetAssembler()->LoadFromOffset(kLoadWord,
                                 temp,
                                 temp,
                                 mirror::Class::ImtPtrOffset(kArmPointerSize).Uint32Value());

  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kArmPointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  GetAssembler()->LoadFromOffset(kLoadWord, temp, temp, method_offset);
  uint32_t entry_point =
      ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value();
  // LR = temp->GetEntryPoint();
  GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, entry_point);

  {
    // Set the hidden argument (in r12). This is done here, right before the BLX, to prevent
    // other instructions from clobbering it, as they might use r12 as a scratch register.
    Location hidden_reg = Location::RegisterLocation(r12.GetCode());
    // The VIXL macro assembler may clobber any of the scratch registers that are available to it,
    // so it checks if the application is using them (by passing them to the macro assembler
    // methods). The following application of UseScratchRegisterScope corrects VIXL's notion of
    // what is available, and is the opposite of the standard usage: Instead of requesting a
    // temporary location, it imposes an external constraint (i.e. a specific register is reserved
    // for the hidden argument). Note that this works even if VIXL needs a scratch register itself
    // (to materialize the constant), since the destination register becomes available for such use
    // internally for the duration of the macro instruction.
    UseScratchRegisterScope temps(GetVIXLAssembler());
    temps.Exclude(RegisterFrom(hidden_reg));
    if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive) {
      Location current_method = locations->InAt(invoke->GetNumberOfArguments() - 1);
      if (current_method.IsStackSlot()) {
        GetAssembler()->LoadFromOffset(
            kLoadWord, RegisterFrom(hidden_reg), sp, current_method.GetStackIndex());
      } else {
        __ Mov(RegisterFrom(hidden_reg), RegisterFrom(current_method));
      }
    } else if (invoke->GetHiddenArgumentLoadKind() == MethodLoadKind::kRuntimeCall) {
      // We pass the method from the IMT in case of a conflict. This will ensure
      // we go into the runtime to resolve the actual method.
      CHECK_NE(temp.GetCode(), lr.GetCode());
      __ Mov(RegisterFrom(hidden_reg), temp);
    } else {
      codegen_->LoadMethod(invoke->GetHiddenArgumentLoadKind(), hidden_reg, invoke);
    }
  }
  {
    // Ensure the pc position is recorded immediately after the `blx` instruction.
    // blx in T32 has only a 16-bit encoding, which is why a stricter check for the scope is used.
Artem Serov0fb37192016-12-06 18:13:40 +00003566 ExactAssemblyScope aas(GetVIXLAssembler(),
Alexandre Rames374ddf32016-11-04 10:40:49 +00003567 vixl32::k16BitT32InstructionSizeInBytes,
3568 CodeBufferCheckScope::kExactSize);
Artem Serovcfbe9132016-10-14 15:58:56 +01003569 // LR();
3570 __ blx(lr);
Artem Serovcfbe9132016-10-14 15:58:56 +01003571 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames374ddf32016-11-04 10:40:49 +00003572 DCHECK(!codegen_->IsLeafMethod());
Artem Serovcfbe9132016-10-14 15:58:56 +01003573 }
Roland Levillain5daa4952017-07-03 17:23:56 +01003574
Andreas Gampe3db70682018-12-26 15:12:03 -08003575 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 8);
Artem Serovcfbe9132016-10-14 15:58:56 +01003576}
3577
Orion Hodsonac141392017-01-13 11:53:47 +00003578void LocationsBuilderARMVIXL::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
Andra Danciua0130e82020-07-23 12:34:56 +00003579 IntrinsicLocationsBuilderARMVIXL intrinsic(codegen_);
3580 if (intrinsic.TryDispatch(invoke)) {
3581 return;
3582 }
Orion Hodsonac141392017-01-13 11:53:47 +00003583 HandleInvoke(invoke);
3584}
3585
3586void InstructionCodeGeneratorARMVIXL::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
Andra Danciua0130e82020-07-23 12:34:56 +00003587 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3588 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 9);
3589 return;
3590 }
Orion Hodsonac141392017-01-13 11:53:47 +00003591 codegen_->GenerateInvokePolymorphicCall(invoke);
Andra Danciua0130e82020-07-23 12:34:56 +00003592 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 10);
Orion Hodsonac141392017-01-13 11:53:47 +00003593}
3594
Orion Hodson4c8e12e2018-05-18 08:33:20 +01003595void LocationsBuilderARMVIXL::VisitInvokeCustom(HInvokeCustom* invoke) {
3596 HandleInvoke(invoke);
3597}
3598
3599void InstructionCodeGeneratorARMVIXL::VisitInvokeCustom(HInvokeCustom* invoke) {
3600 codegen_->GenerateInvokeCustomCall(invoke);
Andra Danciua0130e82020-07-23 12:34:56 +00003601 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 11);
Orion Hodson4c8e12e2018-05-18 08:33:20 +01003602}
3603
Artem Serov02109dd2016-09-23 17:17:54 +01003604void LocationsBuilderARMVIXL::VisitNeg(HNeg* neg) {
3605 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003606 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Artem Serov02109dd2016-09-23 17:17:54 +01003607 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003608 case DataType::Type::kInt32: {
Artem Serov02109dd2016-09-23 17:17:54 +01003609 locations->SetInAt(0, Location::RequiresRegister());
3610 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3611 break;
3612 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003613 case DataType::Type::kInt64: {
Artem Serov02109dd2016-09-23 17:17:54 +01003614 locations->SetInAt(0, Location::RequiresRegister());
3615 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3616 break;
3617 }
3618
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003619 case DataType::Type::kFloat32:
3620 case DataType::Type::kFloat64:
Artem Serov02109dd2016-09-23 17:17:54 +01003621 locations->SetInAt(0, Location::RequiresFpuRegister());
3622 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3623 break;
3624
3625 default:
3626 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3627 }
3628}
3629
3630void InstructionCodeGeneratorARMVIXL::VisitNeg(HNeg* neg) {
3631 LocationSummary* locations = neg->GetLocations();
3632 Location out = locations->Out();
3633 Location in = locations->InAt(0);
3634 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003635 case DataType::Type::kInt32:
Artem Serov02109dd2016-09-23 17:17:54 +01003636 __ Rsb(OutputRegister(neg), InputRegisterAt(neg, 0), 0);
3637 break;
3638
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003639 case DataType::Type::kInt64:
Artem Serov02109dd2016-09-23 17:17:54 +01003640 // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
3641 __ Rsbs(LowRegisterFrom(out), LowRegisterFrom(in), 0);
3642 // We cannot emit an RSC (Reverse Subtract with Carry)
3643 // instruction here, as it does not exist in the Thumb-2
3644 // instruction set. Instead, we use the following approach
3645 // based on SBC and SUB.
3646 //
3647 // out.hi = -C
3648 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(out));
3649 // out.hi = out.hi - in.hi
3650 __ Sub(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(in));
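      // Worked example (illustrative): in = 1 (in.lo = 1, in.hi = 0).
      //   Rsbs: out.lo = 0 - 1 = 0xFFFFFFFF, with a borrow, so C = 0.
      //   Sbc:  out.hi = out.hi - out.hi - (1 - C) = -1.
      //   Sub:  out.hi = -1 - in.hi = 0xFFFFFFFF.
      // Result: out = 0xFFFFFFFF'FFFFFFFF, i.e. -1, as expected.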
3651 break;
3652
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003653 case DataType::Type::kFloat32:
3654 case DataType::Type::kFloat64:
Anton Kirilov644032c2016-12-06 17:51:43 +00003655 __ Vneg(OutputVRegister(neg), InputVRegister(neg));
Artem Serov02109dd2016-09-23 17:17:54 +01003656 break;
3657
3658 default:
3659 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3660 }
3661}
3662
Scott Wakelingfe885462016-09-22 10:24:38 +01003663void LocationsBuilderARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003664 DataType::Type result_type = conversion->GetResultType();
3665 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003666 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
3667 << input_type << " -> " << result_type;
Scott Wakelingfe885462016-09-22 10:24:38 +01003668
3669 // The float-to-long, double-to-long and long-to-float type conversions
3670 // rely on a call to the runtime.
3671 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003672 (((input_type == DataType::Type::kFloat32 || input_type == DataType::Type::kFloat64)
3673 && result_type == DataType::Type::kInt64)
3674 || (input_type == DataType::Type::kInt64 && result_type == DataType::Type::kFloat32))
Scott Wakelingfe885462016-09-22 10:24:38 +01003675 ? LocationSummary::kCallOnMainOnly
3676 : LocationSummary::kNoCall;
3677 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003678 new (GetGraph()->GetAllocator()) LocationSummary(conversion, call_kind);
Scott Wakelingfe885462016-09-22 10:24:38 +01003679
Scott Wakelingfe885462016-09-22 10:24:38 +01003680 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003681 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003682 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003683 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003684 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003685 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
3686 locations->SetInAt(0, Location::RequiresRegister());
3687 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Scott Wakelingfe885462016-09-22 10:24:38 +01003688 break;
3689
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003690 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003691 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003692 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003693 locations->SetInAt(0, Location::Any());
3694 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3695 break;
3696
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003697 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003698 locations->SetInAt(0, Location::RequiresFpuRegister());
3699 locations->SetOut(Location::RequiresRegister());
3700 locations->AddTemp(Location::RequiresFpuRegister());
3701 break;
3702
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003703 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003704 locations->SetInAt(0, Location::RequiresFpuRegister());
3705 locations->SetOut(Location::RequiresRegister());
3706 locations->AddTemp(Location::RequiresFpuRegister());
3707 break;
3708
3709 default:
3710 LOG(FATAL) << "Unexpected type conversion from " << input_type
3711 << " to " << result_type;
3712 }
3713 break;
3714
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003715 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003716 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003717 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003718 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003719 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003720 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003721 case DataType::Type::kInt16:
3722 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003723 locations->SetInAt(0, Location::RequiresRegister());
3724 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3725 break;
3726
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003727 case DataType::Type::kFloat32: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003728 InvokeRuntimeCallingConventionARMVIXL calling_convention;
3729 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
3730 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01003731 break;
3732 }
3733
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003734 case DataType::Type::kFloat64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003735 InvokeRuntimeCallingConventionARMVIXL calling_convention;
3736 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0),
3737 calling_convention.GetFpuRegisterAt(1)));
3738 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01003739 break;
3740 }
3741
3742 default:
3743 LOG(FATAL) << "Unexpected type conversion from " << input_type
3744 << " to " << result_type;
3745 }
3746 break;
3747
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003748 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003749 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003750 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003751 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003752 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003753 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003754 case DataType::Type::kInt16:
3755 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003756 locations->SetInAt(0, Location::RequiresRegister());
3757 locations->SetOut(Location::RequiresFpuRegister());
3758 break;
3759
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003760 case DataType::Type::kInt64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003761 InvokeRuntimeCallingConventionARMVIXL calling_convention;
3762 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0),
3763 calling_convention.GetRegisterAt(1)));
3764 locations->SetOut(LocationFrom(calling_convention.GetFpuRegisterAt(0)));
Scott Wakelingfe885462016-09-22 10:24:38 +01003765 break;
3766 }
3767
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003768 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003769 locations->SetInAt(0, Location::RequiresFpuRegister());
3770 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3771 break;
3772
3773 default:
3774 LOG(FATAL) << "Unexpected type conversion from " << input_type
3775 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003776 }
Scott Wakelingfe885462016-09-22 10:24:38 +01003777 break;
3778
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003779 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003780 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003781 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003782 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003783 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003784 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003785 case DataType::Type::kInt16:
3786 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003787 locations->SetInAt(0, Location::RequiresRegister());
3788 locations->SetOut(Location::RequiresFpuRegister());
3789 break;
3790
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003791 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003792 locations->SetInAt(0, Location::RequiresRegister());
3793 locations->SetOut(Location::RequiresFpuRegister());
3794 locations->AddTemp(Location::RequiresFpuRegister());
3795 locations->AddTemp(Location::RequiresFpuRegister());
3796 break;
3797
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003798 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003799 locations->SetInAt(0, Location::RequiresFpuRegister());
3800 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3801 break;
3802
3803 default:
3804 LOG(FATAL) << "Unexpected type conversion from " << input_type
3805 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003806 }
Scott Wakelingfe885462016-09-22 10:24:38 +01003807 break;
3808
3809 default:
3810 LOG(FATAL) << "Unexpected type conversion from " << input_type
3811 << " to " << result_type;
3812 }
3813}
3814
3815void InstructionCodeGeneratorARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
3816 LocationSummary* locations = conversion->GetLocations();
3817 Location out = locations->Out();
3818 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003819 DataType::Type result_type = conversion->GetResultType();
3820 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003821 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
3822 << input_type << " -> " << result_type;
Scott Wakelingfe885462016-09-22 10:24:38 +01003823 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003824 case DataType::Type::kUint8:
Scott Wakelingfe885462016-09-22 10:24:38 +01003825 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003826 case DataType::Type::kInt8:
3827 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003828 case DataType::Type::kInt16:
3829 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003830 __ Ubfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 8);
3831 break;
3832 case DataType::Type::kInt64:
3833 __ Ubfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 8);
3834 break;
3835
3836 default:
3837 LOG(FATAL) << "Unexpected type conversion from " << input_type
3838 << " to " << result_type;
3839 }
3840 break;
3841
3842 case DataType::Type::kInt8:
3843 switch (input_type) {
3844 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003845 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003846 case DataType::Type::kInt16:
3847 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003848 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 8);
3849 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003850 case DataType::Type::kInt64:
3851 __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 8);
3852 break;
3853
3854 default:
3855 LOG(FATAL) << "Unexpected type conversion from " << input_type
3856 << " to " << result_type;
3857 }
3858 break;
3859
3860 case DataType::Type::kUint16:
3861 switch (input_type) {
3862 case DataType::Type::kInt8:
3863 case DataType::Type::kInt16:
3864 case DataType::Type::kInt32:
3865 __ Ubfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
3866 break;
3867 case DataType::Type::kInt64:
3868 __ Ubfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
3869 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01003870
3871 default:
3872 LOG(FATAL) << "Unexpected type conversion from " << input_type
3873 << " to " << result_type;
3874 }
3875 break;
3876
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003877 case DataType::Type::kInt16:
Scott Wakelingfe885462016-09-22 10:24:38 +01003878 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003879 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003880 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003881 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
3882 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003883 case DataType::Type::kInt64:
3884 __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
3885 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01003886
3887 default:
3888 LOG(FATAL) << "Unexpected type conversion from " << input_type
3889 << " to " << result_type;
3890 }
3891 break;
3892
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003893 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003894 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003895 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003896 DCHECK(out.IsRegister());
3897 if (in.IsRegisterPair()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003898 __ Mov(OutputRegister(conversion), LowRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01003899 } else if (in.IsDoubleStackSlot()) {
3900 GetAssembler()->LoadFromOffset(kLoadWord,
3901 OutputRegister(conversion),
3902 sp,
3903 in.GetStackIndex());
3904 } else {
3905 DCHECK(in.IsConstant());
3906 DCHECK(in.GetConstant()->IsLongConstant());
Vladimir Markoba1a48e2017-04-13 11:50:14 +01003907 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
3908 __ Mov(OutputRegister(conversion), static_cast<int32_t>(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01003909 }
3910 break;
3911
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003912 case DataType::Type::kFloat32: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003913 vixl32::SRegister temp = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003914 __ Vcvt(S32, F32, temp, InputSRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01003915 __ Vmov(OutputRegister(conversion), temp);
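          // Note: Vcvt (F32 -> S32) rounds towards zero and saturates, e.g. (illustrative)
          // -3.7f => -3, 1e30f => INT32_MAX, NaN => 0, which matches Java's f2i semantics.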
3916 break;
3917 }
3918
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003919 case DataType::Type::kFloat64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003920 vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003921 __ Vcvt(S32, F64, temp_s, DRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01003922 __ Vmov(OutputRegister(conversion), temp_s);
3923 break;
3924 }
3925
3926 default:
3927 LOG(FATAL) << "Unexpected type conversion from " << input_type
3928 << " to " << result_type;
3929 }
3930 break;
3931
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003932 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003933 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003934 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003935 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003936 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003937 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003938 case DataType::Type::kInt16:
3939 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003940 DCHECK(out.IsRegisterPair());
3941 DCHECK(in.IsRegister());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003942 __ Mov(LowRegisterFrom(out), InputRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01003943 // Sign extension.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003944 __ Asr(HighRegisterFrom(out), LowRegisterFrom(out), 31);
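          // e.g. (illustrative) in = -1: out.lo = 0xFFFFFFFF and the arithmetic shift
          // yields out.hi = 0xFFFFFFFF, giving the 64-bit value -1.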
Scott Wakelingfe885462016-09-22 10:24:38 +01003945 break;
3946
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003947 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003948 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
3949 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
3950 break;
3951
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003952 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003953 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
3954 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
3955 break;
3956
3957 default:
3958 LOG(FATAL) << "Unexpected type conversion from " << input_type
3959 << " to " << result_type;
3960 }
3961 break;
3962
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003963 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003964 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003965 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003966 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003967 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003968 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003969 case DataType::Type::kInt16:
3970 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003971 __ Vmov(OutputSRegister(conversion), InputRegisterAt(conversion, 0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003972 __ Vcvt(F32, S32, OutputSRegister(conversion), OutputSRegister(conversion));
Scott Wakelingfe885462016-09-22 10:24:38 +01003973 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01003974
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003975 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003976 codegen_->InvokeRuntime(kQuickL2f, conversion, conversion->GetDexPc());
3977 CheckEntrypointTypes<kQuickL2f, float, int64_t>();
3978 break;
3979
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003980 case DataType::Type::kFloat64:
Scott Wakelingc34dba72016-10-03 10:14:44 +01003981 __ Vcvt(F32, F64, OutputSRegister(conversion), DRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01003982 break;
3983
3984 default:
3985 LOG(FATAL) << "Unexpected type conversion from " << input_type
3986 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003987 }
Scott Wakelingfe885462016-09-22 10:24:38 +01003988 break;
3989
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003990 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003991 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003992 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003993 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003994 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003995 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003996 case DataType::Type::kInt16:
3997 case DataType::Type::kInt32:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003998 __ Vmov(LowSRegisterFrom(out), InputRegisterAt(conversion, 0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003999 __ Vcvt(F64, S32, DRegisterFrom(out), LowSRegisterFrom(out));
Scott Wakelingfe885462016-09-22 10:24:38 +01004000 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01004001
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004002 case DataType::Type::kInt64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004003 vixl32::Register low = LowRegisterFrom(in);
4004 vixl32::Register high = HighRegisterFrom(in);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004005 vixl32::SRegister out_s = LowSRegisterFrom(out);
Scott Wakelingc34dba72016-10-03 10:14:44 +01004006 vixl32::DRegister out_d = DRegisterFrom(out);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004007 vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingc34dba72016-10-03 10:14:44 +01004008 vixl32::DRegister temp_d = DRegisterFrom(locations->GetTemp(0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01004009 vixl32::DRegister constant_d = DRegisterFrom(locations->GetTemp(1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004010
4011 // temp_d = int-to-double(high)
4012 __ Vmov(temp_s, high);
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01004013 __ Vcvt(F64, S32, temp_d, temp_s);
Scott Wakelingfe885462016-09-22 10:24:38 +01004014 // constant_d = k2Pow32EncodingForDouble
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004015 __ Vmov(constant_d, bit_cast<double, int64_t>(k2Pow32EncodingForDouble));
Scott Wakelingfe885462016-09-22 10:24:38 +01004016 // out_d = unsigned-to-double(low)
4017 __ Vmov(out_s, low);
4018 __ Vcvt(F64, U32, out_d, out_s);
4019 // out_d += temp_d * constant_d
4020 __ Vmla(F64, out_d, temp_d, constant_d);
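          // out_d = unsigned(low) + (double)high * 2^32. Worked example (illustrative):
          // in = -1 (high = -1, low = 0xFFFFFFFF):
          //   4294967295.0 + (-1.0) * 4294967296.0 = -1.0.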
4021 break;
4022 }
4023
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004024 case DataType::Type::kFloat32:
Scott Wakelingc34dba72016-10-03 10:14:44 +01004025 __ Vcvt(F64, F32, DRegisterFrom(out), InputSRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01004026 break;
4027
4028 default:
4029 LOG(FATAL) << "Unexpected type conversion from " << input_type
4030 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08004031 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004032 break;
4033
4034 default:
4035 LOG(FATAL) << "Unexpected type conversion from " << input_type
4036 << " to " << result_type;
4037 }
4038}
4039
4040void LocationsBuilderARMVIXL::VisitAdd(HAdd* add) {
4041 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004042 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Scott Wakelingfe885462016-09-22 10:24:38 +01004043 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004044 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004045 locations->SetInAt(0, Location::RequiresRegister());
4046 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
4047 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4048 break;
4049 }
4050
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004051 case DataType::Type::kInt64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004052 locations->SetInAt(0, Location::RequiresRegister());
Anton Kirilovdda43962016-11-21 19:55:20 +00004053 locations->SetInAt(1, ArmEncodableConstantOrRegister(add->InputAt(1), ADD));
Scott Wakelingfe885462016-09-22 10:24:38 +01004054 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4055 break;
4056 }
4057
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004058 case DataType::Type::kFloat32:
4059 case DataType::Type::kFloat64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004060 locations->SetInAt(0, Location::RequiresFpuRegister());
4061 locations->SetInAt(1, Location::RequiresFpuRegister());
4062 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4063 break;
4064 }
4065
4066 default:
4067 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
4068 }
4069}
4070
4071void InstructionCodeGeneratorARMVIXL::VisitAdd(HAdd* add) {
4072 LocationSummary* locations = add->GetLocations();
4073 Location out = locations->Out();
4074 Location first = locations->InAt(0);
4075 Location second = locations->InAt(1);
4076
4077 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004078 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004079 __ Add(OutputRegister(add), InputRegisterAt(add, 0), InputOperandAt(add, 1));
4080 }
4081 break;
4082
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004083 case DataType::Type::kInt64: {
Anton Kirilovdda43962016-11-21 19:55:20 +00004084 if (second.IsConstant()) {
4085 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
4086 GenerateAddLongConst(out, first, value);
4087 } else {
4088 DCHECK(second.IsRegisterPair());
4089 __ Adds(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
4090 __ Adc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
4091 }
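      // The Adds/Adc pair performs the 64-bit addition via the carry flag, e.g.
      // (illustrative) first = 0x00000000'FFFFFFFF, second = 1:
      //   Adds: out.lo = 0, C = 1;  Adc: out.hi = 0 + 0 + C = 1,
      // so out = 0x00000001'00000000.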
Scott Wakelingfe885462016-09-22 10:24:38 +01004092 break;
4093 }
4094
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004095 case DataType::Type::kFloat32:
4096 case DataType::Type::kFloat64:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004097 __ Vadd(OutputVRegister(add), InputVRegisterAt(add, 0), InputVRegisterAt(add, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004098 break;
4099
4100 default:
4101 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
4102 }
4103}
4104
4105void LocationsBuilderARMVIXL::VisitSub(HSub* sub) {
4106 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004107 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Scott Wakelingfe885462016-09-22 10:24:38 +01004108 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004109 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004110 locations->SetInAt(0, Location::RequiresRegister());
4111 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
4112 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4113 break;
4114 }
4115
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004116 case DataType::Type::kInt64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004117 locations->SetInAt(0, Location::RequiresRegister());
Anton Kirilovdda43962016-11-21 19:55:20 +00004118 locations->SetInAt(1, ArmEncodableConstantOrRegister(sub->InputAt(1), SUB));
Scott Wakelingfe885462016-09-22 10:24:38 +01004119 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4120 break;
4121 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004122 case DataType::Type::kFloat32:
4123 case DataType::Type::kFloat64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004124 locations->SetInAt(0, Location::RequiresFpuRegister());
4125 locations->SetInAt(1, Location::RequiresFpuRegister());
4126 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4127 break;
4128 }
4129 default:
4130 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
4131 }
4132}
4133
4134void InstructionCodeGeneratorARMVIXL::VisitSub(HSub* sub) {
4135 LocationSummary* locations = sub->GetLocations();
4136 Location out = locations->Out();
4137 Location first = locations->InAt(0);
4138 Location second = locations->InAt(1);
4139 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004140 case DataType::Type::kInt32: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004141 __ Sub(OutputRegister(sub), InputRegisterAt(sub, 0), InputOperandAt(sub, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004142 break;
4143 }
4144
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004145 case DataType::Type::kInt64: {
Anton Kirilovdda43962016-11-21 19:55:20 +00004146 if (second.IsConstant()) {
4147 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
4148 GenerateAddLongConst(out, first, -value);
4149 } else {
4150 DCHECK(second.IsRegisterPair());
4151 __ Subs(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
4152 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
4153 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004154 break;
4155 }
4156
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004157 case DataType::Type::kFloat32:
4158 case DataType::Type::kFloat64:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004159 __ Vsub(OutputVRegister(sub), InputVRegisterAt(sub, 0), InputVRegisterAt(sub, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004160 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01004161
4162 default:
4163 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
4164 }
4165}
4166
4167void LocationsBuilderARMVIXL::VisitMul(HMul* mul) {
4168 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004169 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Scott Wakelingfe885462016-09-22 10:24:38 +01004170 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004171 case DataType::Type::kInt32:
4172 case DataType::Type::kInt64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004173 locations->SetInAt(0, Location::RequiresRegister());
4174 locations->SetInAt(1, Location::RequiresRegister());
4175 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4176 break;
4177 }
4178
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004179 case DataType::Type::kFloat32:
4180 case DataType::Type::kFloat64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004181 locations->SetInAt(0, Location::RequiresFpuRegister());
4182 locations->SetInAt(1, Location::RequiresFpuRegister());
4183 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4184 break;
4185 }
4186
4187 default:
4188 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4189 }
4190}
4191
4192void InstructionCodeGeneratorARMVIXL::VisitMul(HMul* mul) {
4193 LocationSummary* locations = mul->GetLocations();
4194 Location out = locations->Out();
4195 Location first = locations->InAt(0);
4196 Location second = locations->InAt(1);
4197 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004198 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004199 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4200 break;
4201 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004202 case DataType::Type::kInt64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004203 vixl32::Register out_hi = HighRegisterFrom(out);
4204 vixl32::Register out_lo = LowRegisterFrom(out);
4205 vixl32::Register in1_hi = HighRegisterFrom(first);
4206 vixl32::Register in1_lo = LowRegisterFrom(first);
4207 vixl32::Register in2_hi = HighRegisterFrom(second);
4208 vixl32::Register in2_lo = LowRegisterFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01004209
4210 // Extra checks to protect against aliasing, caused by the existence of register pairs like R1_R2.
4211 // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
4212 // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
Anton Kirilov644032c2016-12-06 17:51:43 +00004213 DCHECK(!out_hi.Is(in1_lo));
4214 DCHECK(!out_hi.Is(in2_lo));
Scott Wakelingfe885462016-09-22 10:24:38 +01004215
4216 // input: in1 - 64 bits, in2 - 64 bits
4217 // output: out
4218 // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
4219 // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
4220 // parts: out.lo = (in1.lo * in2.lo)[31:0]
4221
4222 UseScratchRegisterScope temps(GetVIXLAssembler());
4223 vixl32::Register temp = temps.Acquire();
4224 // temp <- in1.lo * in2.hi
4225 __ Mul(temp, in1_lo, in2_hi);
4226 // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
4227 __ Mla(out_hi, in1_hi, in2_lo, temp);
4228 // out.lo <- (in1.lo * in2.lo)[31:0];
4229 __ Umull(out_lo, temp, in1_lo, in2_lo);
4230 // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004231 __ Add(out_hi, out_hi, temp);
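      // Worked example (illustrative): in1 = in2 = 2^32 + 1 (lo = 1, hi = 1).
      //   Mul:   temp = 1 * 1 = 1
      //   Mla:   out.hi = 1 * 1 + 1 = 2
      //   Umull: out.lo = 1, temp = 0 (upper half of 1 * 1)
      //   Add:   out.hi = 2 + 0 = 2
      // Result: 2 * 2^32 + 1, which is (2^32 + 1)^2 truncated to 64 bits.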
Scott Wakelingfe885462016-09-22 10:24:38 +01004232 break;
4233 }
4234
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004235 case DataType::Type::kFloat32:
4236 case DataType::Type::kFloat64:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004237 __ Vmul(OutputVRegister(mul), InputVRegisterAt(mul, 0), InputVRegisterAt(mul, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004238 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01004239
4240 default:
4241 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4242 }
4243}
4244
Scott Wakelingfe885462016-09-22 10:24:38 +01004245void InstructionCodeGeneratorARMVIXL::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
4246 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004247 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
Scott Wakelingfe885462016-09-22 10:24:38 +01004248
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004249 Location second = instruction->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01004250 DCHECK(second.IsConstant());
4251
4252 vixl32::Register out = OutputRegister(instruction);
4253 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Anton Kirilov644032c2016-12-06 17:51:43 +00004254 int32_t imm = Int32ConstantFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01004255 DCHECK(imm == 1 || imm == -1);
4256
4257 if (instruction->IsRem()) {
4258 __ Mov(out, 0);
4259 } else {
4260 if (imm == 1) {
4261 __ Mov(out, dividend);
4262 } else {
4263 __ Rsb(out, dividend, 0);
4264 }
4265 }
4266}
4267
4268void InstructionCodeGeneratorARMVIXL::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
4269 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004270 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
Scott Wakelingfe885462016-09-22 10:24:38 +01004271
4272 LocationSummary* locations = instruction->GetLocations();
4273 Location second = locations->InAt(1);
4274 DCHECK(second.IsConstant());
4275
4276 vixl32::Register out = OutputRegister(instruction);
4277 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Anton Kirilov644032c2016-12-06 17:51:43 +00004278 int32_t imm = Int32ConstantFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01004279 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
4280 int ctz_imm = CTZ(abs_imm);
4281
Evgeny Astigeevichaf92a0f2020-06-26 13:28:33 +01004282 auto generate_div_code = [this, imm, ctz_imm](vixl32::Register out, vixl32::Register in) {
4283 __ Asr(out, in, ctz_imm);
Scott Wakelingfe885462016-09-22 10:24:38 +01004284 if (imm < 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004285 __ Rsb(out, out, 0);
Scott Wakelingfe885462016-09-22 10:24:38 +01004286 }
Evgeny Astigeevichaf92a0f2020-06-26 13:28:33 +01004287 };
4288
Evgeny Astigeevich0f3d7ac2020-08-06 16:28:37 +01004289 if (HasNonNegativeOrMinIntInputAt(instruction, 0)) {
Evgeny Astigeevichaf92a0f2020-06-26 13:28:33 +01004290 // No need to adjust the result for non-negative dividends or the INT32_MIN dividend.
4291 // NOTE: The generated code for HDiv/HRem correctly works for the INT32_MIN dividend:
4292 // imm == 2
4293 // HDiv
4294 // add out, dividend(0x80000000), dividend(0x80000000), lsr #31 => out = 0x80000001
4295 // asr out, out(0x80000001), #1 => out = 0xc0000000
4296 // This is the same as 'asr out, dividend(0x80000000), #1'
4297 //
4298 // imm > 2
4299 // HDiv
4300 // asr out, dividend(0x80000000), #31 => out = -1
4301 // add out, dividend(0x80000000), out(-1), lsr #(32 - ctz_imm) => out = 0b10..01..1,
4302 // where the number of the rightmost 1s is ctz_imm.
4303 // asr out, out(0b10..01..1), #ctz_imm => out = 0b1..10..0, where the number of the
4304 // leftmost 1s is ctz_imm + 1.
4305 // This is the same as 'asr out, dividend(0x80000000), #ctz_imm'.
4306 //
4307 // imm == INT32_MIN
4308 // HDiv
4309 // asr out, dividend(0x80000000), #31 => out = -1
4310 // add out, dividend(0x80000000), out(-1), lsr #1 => out = 0xc0000000
4311 // asr out, out(0xc0000000), #31 => out = -1
4312 // rsb out, out(-1), #0 => out = 1
4313 // This is the same as
4314 // asr out, dividend(0x80000000), #31
4315 // rsb out, out, #0
4316 //
4317 //
4318 // INT_MIN % imm must be 0 for any imm that is a power of 2. 'and' and 'ubfx' work only with bits
4319 // 0..30 of a dividend. For INT32_MIN those bits are zeros. So 'and' and 'ubfx' always
4320 // produce zero.
4321 if (instruction->IsDiv()) {
4322 generate_div_code(out, dividend);
4323 } else {
4324 if (GetVIXLAssembler()->IsModifiedImmediate(abs_imm - 1)) {
4325 __ And(out, dividend, abs_imm - 1);
4326 } else {
4327 __ Ubfx(out, dividend, 0, ctz_imm);
4328 }
4329 return;
4330 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004331 } else {
Evgeny Astigeevichaf92a0f2020-06-26 13:28:33 +01004332 vixl32::Register add_right_input = dividend;
4333 if (ctz_imm > 1) {
4334 __ Asr(out, dividend, 31);
4335 add_right_input = out;
4336 }
4337 __ Add(out, dividend, Operand(add_right_input, vixl32::LSR, 32 - ctz_imm));
4338
4339 if (instruction->IsDiv()) {
4340 generate_div_code(out, out);
4341 } else {
4342 __ Bfc(out, 0, ctz_imm);
4343 __ Sub(out, dividend, out);
4344 }
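    // Worked example (illustrative): imm = 4 (ctz_imm = 2), dividend = -5:
    //   asr out, dividend, #31           => out = -1
    //   add out, dividend, out, lsr #30  => out = -5 + 3 = -2
    //   div: asr out, out, #2            => out = -1          (-5 / 4)
    //   rem: bfc out, #0, #2             => out = -4,
    //        sub out, dividend, out      => out = -5 - (-4) = -1  (-5 % 4)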
Scott Wakelingfe885462016-09-22 10:24:38 +01004345 }
4346}
4347
4348void InstructionCodeGeneratorARMVIXL::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
4349 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004350 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
Scott Wakelingfe885462016-09-22 10:24:38 +01004351
4352 LocationSummary* locations = instruction->GetLocations();
4353 Location second = locations->InAt(1);
4354 DCHECK(second.IsConstant());
4355
4356 vixl32::Register out = OutputRegister(instruction);
4357 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004358 vixl32::Register temp1 = RegisterFrom(locations->GetTemp(0));
4359 vixl32::Register temp2 = RegisterFrom(locations->GetTemp(1));
Scott Wakelingb77051e2016-11-21 19:46:00 +00004360 int32_t imm = Int32ConstantFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01004361
4362 int64_t magic;
4363 int shift;
Andreas Gampe3db70682018-12-26 15:12:03 -08004364 CalculateMagicAndShiftForDivRem(imm, /* is_long= */ false, &magic, &shift);
Scott Wakelingfe885462016-09-22 10:24:38 +01004365
Evgeny Astigeevichf9388412020-07-02 15:25:13 +01004366 auto generate_unsigned_div_code = [this, magic, shift](vixl32::Register out,
4367 vixl32::Register dividend,
4368 vixl32::Register temp1,
4369 vixl32::Register temp2) {
4370 // TODO(VIXL): Change the static cast to Operand::From() after VIXL is fixed.
4371 __ Mov(temp1, static_cast<int32_t>(magic));
4372 if (magic > 0 && shift == 0) {
4373 __ Smull(temp2, out, dividend, temp1);
4374 } else {
4375 __ Smull(temp2, temp1, dividend, temp1);
4376 if (magic < 0) {
4377 // The negative magic M = static_cast<int>(m) means that the multiplier m is greater
4378 // than INT32_MAX. In such a case shift is never 0.
4379 // Proof:
4380 // m = (2^p + d - 2^p % d) / d, where p = 32 + shift, d > 2
4381 //
4382 // If shift == 0, m = (2^32 + d - 2^32 % d) / d =
4383 // = (2^32 + d - (2^32 - (2^32 / d) * d)) / d =
4384 // = (d + (2^32 / d) * d) / d = 1 + (2^32 / d), here '/' is the integer division.
4385 //
4386 // 1 + (2^32 / d) is decreasing when d is increasing.
4387 // The maximum is 1 431 655 766, when d == 3. This value is less than INT32_MAX.
4388 // The minimum is 3, when d == 2^31 - 1.
4389 // So, for all values of d in [3, INT32_MAX], m with p == 32 is in [3, INT32_MAX) and
4390 // is never less than 0.
4391 __ Add(temp1, temp1, dividend);
4392 }
4393 DCHECK_NE(shift, 0);
4394 __ Lsr(out, temp1, shift);
4395 }
4396 };
Scott Wakelingfe885462016-09-22 10:24:38 +01004397
Evgeny Astigeevich0f3d7ac2020-08-06 16:28:37 +01004398 if (imm > 0 && HasNonNegativeInputAt(instruction, 0)) {
Evgeny Astigeevichf9388412020-07-02 15:25:13 +01004399 // No need to adjust the result for a non-negative dividend and a positive divisor.
4400 if (instruction->IsDiv()) {
4401 generate_unsigned_div_code(out, dividend, temp1, temp2);
4402 } else {
4403 generate_unsigned_div_code(temp1, dividend, temp1, temp2);
4404 __ Mov(temp2, imm);
4405 __ Mls(out, temp1, temp2, dividend);
4406 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004407 } else {
Evgeny Astigeevichf9388412020-07-02 15:25:13 +01004408 // TODO(VIXL): Change the static cast to Operand::From() after VIXL is fixed.
4409 __ Mov(temp1, static_cast<int32_t>(magic));
4410 __ Smull(temp2, temp1, dividend, temp1);
4411
4412 if (imm > 0 && magic < 0) {
4413 __ Add(temp1, temp1, dividend);
4414 } else if (imm < 0 && magic > 0) {
4415 __ Sub(temp1, temp1, dividend);
4416 }
4417
4418 if (shift != 0) {
4419 __ Asr(temp1, temp1, shift);
4420 }
4421
4422 if (instruction->IsDiv()) {
4423 __ Sub(out, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
4424 } else {
4425 __ Sub(temp1, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
4426 // TODO: Strength reduction for mls.
4427 __ Mov(temp2, imm);
4428 __ Mls(out, temp1, temp2, dividend);
4429 }
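    // Worked example (illustrative): imm = 3, dividend = 7, assuming the well-known
    // magic constant for division by 3 (magic = 0x55555556, shift = 0):
    //   Smull: temp1 = high32(7 * 0x55555556) = 2; no correction or shift is needed.
    //   div: out = temp1 - (temp1 >> 31) = 2          (7 / 3)
    //   rem: temp1 = 2, out = 7 - 2 * 3 = 1           (7 % 3)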
Scott Wakelingfe885462016-09-22 10:24:38 +01004430 }
4431}
4432
4433void InstructionCodeGeneratorARMVIXL::GenerateDivRemConstantIntegral(
4434 HBinaryOperation* instruction) {
4435 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004436 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
Scott Wakelingfe885462016-09-22 10:24:38 +01004437
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004438 Location second = instruction->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01004439 DCHECK(second.IsConstant());
4440
Anton Kirilov644032c2016-12-06 17:51:43 +00004441 int32_t imm = Int32ConstantFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01004442 if (imm == 0) {
4443 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
4444 } else if (imm == 1 || imm == -1) {
4445 DivRemOneOrMinusOne(instruction);
4446 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
4447 DivRemByPowerOfTwo(instruction);
4448 } else {
4449 DCHECK(imm <= -2 || imm >= 2);
4450 GenerateDivRemWithAnyConstant(instruction);
4451 }
4452}
4453
4454void LocationsBuilderARMVIXL::VisitDiv(HDiv* div) {
4455 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004456 if (div->GetResultType() == DataType::Type::kInt64) {
Scott Wakelingfe885462016-09-22 10:24:38 +01004457 // pLdiv runtime call.
4458 call_kind = LocationSummary::kCallOnMainOnly;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004459 } else if (div->GetResultType() == DataType::Type::kInt32 && div->InputAt(1)->IsConstant()) {
Scott Wakelingfe885462016-09-22 10:24:38 +01004460 // sdiv will be replaced by another instruction sequence.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004461 } else if (div->GetResultType() == DataType::Type::kInt32 &&
Scott Wakelingfe885462016-09-22 10:24:38 +01004462 !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4463 // pIdivmod runtime call.
4464 call_kind = LocationSummary::kCallOnMainOnly;
4465 }
4466
Vladimir Markoca6fff82017-10-03 14:49:14 +01004467 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(div, call_kind);
Scott Wakelingfe885462016-09-22 10:24:38 +01004468
4469 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004470 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004471 if (div->InputAt(1)->IsConstant()) {
4472 locations->SetInAt(0, Location::RequiresRegister());
4473 locations->SetInAt(1, Location::ConstantLocation(div->InputAt(1)->AsConstant()));
Anton Kirilov644032c2016-12-06 17:51:43 +00004474 int32_t value = Int32ConstantFrom(div->InputAt(1));
Evgeny Astigeevich2d101172020-06-25 16:52:03 +01004475 Location::OutputOverlap out_overlaps = Location::kNoOutputOverlap;
Scott Wakelingfe885462016-09-22 10:24:38 +01004476 if (value == 1 || value == 0 || value == -1) {
4477 // No temp register required.
Evgeny Astigeevichaf92a0f2020-06-26 13:28:33 +01004478 } else if (IsPowerOfTwo(AbsOrMin(value)) &&
4479 value != 2 &&
4480 value != -2 &&
Evgeny Astigeevich0f3d7ac2020-08-06 16:28:37 +01004481 !HasNonNegativeOrMinIntInputAt(div, 0)) {
Evgeny Astigeevich2d101172020-06-25 16:52:03 +01004482 // The "out" register is used as a temporary, so it overlaps with the inputs.
4483 out_overlaps = Location::kOutputOverlap;
Scott Wakelingfe885462016-09-22 10:24:38 +01004484 } else {
Evgeny Astigeevich2d101172020-06-25 16:52:03 +01004485 locations->AddRegisterTemps(2);
Scott Wakelingfe885462016-09-22 10:24:38 +01004486 }
Evgeny Astigeevich2d101172020-06-25 16:52:03 +01004487 locations->SetOut(Location::RequiresRegister(), out_overlaps);
Scott Wakelingfe885462016-09-22 10:24:38 +01004488 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4489 locations->SetInAt(0, Location::RequiresRegister());
4490 locations->SetInAt(1, Location::RequiresRegister());
4491 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4492 } else {
Artem Serov551b28f2016-10-18 19:11:30 +01004493 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4494 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4495 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004496 // Note: divmod will compute both the quotient and the remainder as the pair R0 and R1, but
Artem Serov551b28f2016-10-18 19:11:30 +01004497 // we only need the former.
4498 locations->SetOut(LocationFrom(r0));
Scott Wakelingfe885462016-09-22 10:24:38 +01004499 }
4500 break;
4501 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004502 case DataType::Type::kInt64: {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004503 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4504 locations->SetInAt(0, LocationFrom(
4505 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
4506 locations->SetInAt(1, LocationFrom(
4507 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
4508 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004509 break;
4510 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004511 case DataType::Type::kFloat32:
4512 case DataType::Type::kFloat64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004513 locations->SetInAt(0, Location::RequiresFpuRegister());
4514 locations->SetInAt(1, Location::RequiresFpuRegister());
4515 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4516 break;
4517 }
4518
4519 default:
4520 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
4521 }
4522}
4523
4524void InstructionCodeGeneratorARMVIXL::VisitDiv(HDiv* div) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004525 Location lhs = div->GetLocations()->InAt(0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004526 Location rhs = div->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01004527
4528 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004529 case DataType::Type::kInt32: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004530 if (rhs.IsConstant()) {
Scott Wakelingfe885462016-09-22 10:24:38 +01004531 GenerateDivRemConstantIntegral(div);
4532 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4533 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
4534 } else {
Artem Serov551b28f2016-10-18 19:11:30 +01004535 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4536 DCHECK(calling_convention.GetRegisterAt(0).Is(RegisterFrom(lhs)));
4537 DCHECK(calling_convention.GetRegisterAt(1).Is(RegisterFrom(rhs)));
4538 DCHECK(r0.Is(OutputRegister(div)));
4539
4540 codegen_->InvokeRuntime(kQuickIdivmod, div, div->GetDexPc());
4541 CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
Scott Wakelingfe885462016-09-22 10:24:38 +01004542 }
4543 break;
4544 }
4545
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004546 case DataType::Type::kInt64: {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004547 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4548 DCHECK(calling_convention.GetRegisterAt(0).Is(LowRegisterFrom(lhs)));
4549 DCHECK(calling_convention.GetRegisterAt(1).Is(HighRegisterFrom(lhs)));
4550 DCHECK(calling_convention.GetRegisterAt(2).Is(LowRegisterFrom(rhs)));
4551 DCHECK(calling_convention.GetRegisterAt(3).Is(HighRegisterFrom(rhs)));
4552 DCHECK(LowRegisterFrom(div->GetLocations()->Out()).Is(r0));
4553 DCHECK(HighRegisterFrom(div->GetLocations()->Out()).Is(r1));
4554
4555 codegen_->InvokeRuntime(kQuickLdiv, div, div->GetDexPc());
4556 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Scott Wakelingfe885462016-09-22 10:24:38 +01004557 break;
4558 }
4559
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004560 case DataType::Type::kFloat32:
4561 case DataType::Type::kFloat64:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004562 __ Vdiv(OutputVRegister(div), InputVRegisterAt(div, 0), InputVRegisterAt(div, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004563 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01004564
4565 default:
4566 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
4567 }
4568}
4569
Artem Serov551b28f2016-10-18 19:11:30 +01004570void LocationsBuilderARMVIXL::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004571 DataType::Type type = rem->GetResultType();
Artem Serov551b28f2016-10-18 19:11:30 +01004572
4573 // Most remainders are implemented in the runtime.
4574 LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004575 if (rem->GetResultType() == DataType::Type::kInt32 && rem->InputAt(1)->IsConstant()) {
Artem Serov551b28f2016-10-18 19:11:30 +01004576 // sdiv will be replaced by other instruction sequence.
4577 call_kind = LocationSummary::kNoCall;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004578 } else if ((rem->GetResultType() == DataType::Type::kInt32)
Artem Serov551b28f2016-10-18 19:11:30 +01004579 && codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4580 // Have hardware divide instruction for int, do it with three instructions.
4581 call_kind = LocationSummary::kNoCall;
4582 }
4583
Vladimir Markoca6fff82017-10-03 14:49:14 +01004584 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Artem Serov551b28f2016-10-18 19:11:30 +01004585
4586 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004587 case DataType::Type::kInt32: {
Artem Serov551b28f2016-10-18 19:11:30 +01004588 if (rem->InputAt(1)->IsConstant()) {
4589 locations->SetInAt(0, Location::RequiresRegister());
4590 locations->SetInAt(1, Location::ConstantLocation(rem->InputAt(1)->AsConstant()));
Anton Kirilov644032c2016-12-06 17:51:43 +00004591 int32_t value = Int32ConstantFrom(rem->InputAt(1));
Evgeny Astigeevich2d101172020-06-25 16:52:03 +01004592 Location::OutputOverlap out_overlaps = Location::kNoOutputOverlap;
Artem Serov551b28f2016-10-18 19:11:30 +01004593 if (value == 1 || value == 0 || value == -1) {
4594 // No temp register required.
Evgeny Astigeevich0f3d7ac2020-08-06 16:28:37 +01004595 } else if (IsPowerOfTwo(AbsOrMin(value)) && !HasNonNegativeOrMinIntInputAt(rem, 0)) {
Evgeny Astigeevich2d101172020-06-25 16:52:03 +01004596 // The "out" register is used as a temporary, so it overlaps with the inputs.
4597 out_overlaps = Location::kOutputOverlap;
Artem Serov551b28f2016-10-18 19:11:30 +01004598 } else {
Evgeny Astigeevich2d101172020-06-25 16:52:03 +01004599 locations->AddRegisterTemps(2);
Artem Serov551b28f2016-10-18 19:11:30 +01004600 }
Evgeny Astigeevich2d101172020-06-25 16:52:03 +01004601 locations->SetOut(Location::RequiresRegister(), out_overlaps);
Artem Serov551b28f2016-10-18 19:11:30 +01004602 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4603 locations->SetInAt(0, Location::RequiresRegister());
4604 locations->SetInAt(1, Location::RequiresRegister());
4605 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4606 locations->AddTemp(Location::RequiresRegister());
4607 } else {
4608 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4609 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4610 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004611 // Note: divmod will compute both the quotient and the remainder as the pair R0 and R1, but
Artem Serov551b28f2016-10-18 19:11:30 +01004612 // we only need the latter.
4613 locations->SetOut(LocationFrom(r1));
4614 }
4615 break;
4616 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004617 case DataType::Type::kInt64: {
Artem Serov551b28f2016-10-18 19:11:30 +01004618 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4619 locations->SetInAt(0, LocationFrom(
4620 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
4621 locations->SetInAt(1, LocationFrom(
4622 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
4623 // The runtime helper puts the output in R2,R3.
4624 locations->SetOut(LocationFrom(r2, r3));
4625 break;
4626 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004627 case DataType::Type::kFloat32: {
Artem Serov551b28f2016-10-18 19:11:30 +01004628 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4629 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4630 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4631 locations->SetOut(LocationFrom(s0));
4632 break;
4633 }
4634
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004635 case DataType::Type::kFloat64: {
Artem Serov551b28f2016-10-18 19:11:30 +01004636 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4637 locations->SetInAt(0, LocationFrom(
4638 calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
4639 locations->SetInAt(1, LocationFrom(
4640 calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
4641 locations->SetOut(LocationFrom(s0, s1));
4642 break;
4643 }
4644
4645 default:
4646 LOG(FATAL) << "Unexpected rem type " << type;
4647 }
4648}
4649
4650void InstructionCodeGeneratorARMVIXL::VisitRem(HRem* rem) {
4651 LocationSummary* locations = rem->GetLocations();
4652 Location second = locations->InAt(1);
4653
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004654 DataType::Type type = rem->GetResultType();
Artem Serov551b28f2016-10-18 19:11:30 +01004655 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004656 case DataType::Type::kInt32: {
Artem Serov551b28f2016-10-18 19:11:30 +01004657 vixl32::Register reg1 = InputRegisterAt(rem, 0);
4658 vixl32::Register out_reg = OutputRegister(rem);
4659 if (second.IsConstant()) {
4660 GenerateDivRemConstantIntegral(rem);
4661 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4662 vixl32::Register reg2 = RegisterFrom(second);
4663 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
4664
4665 // temp = reg1 / reg2 (integer division)
4666 // dest = reg1 - temp * reg2
4667 __ Sdiv(temp, reg1, reg2);
4668 __ Mls(out_reg, temp, reg2, reg1);
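        // e.g. (illustrative) reg1 = 7, reg2 = 3: temp = 7 / 3 = 2,
        // out_reg = 7 - 2 * 3 = 1.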
4669 } else {
4670 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4671 DCHECK(reg1.Is(calling_convention.GetRegisterAt(0)));
4672 DCHECK(RegisterFrom(second).Is(calling_convention.GetRegisterAt(1)));
4673 DCHECK(out_reg.Is(r1));
4674
4675 codegen_->InvokeRuntime(kQuickIdivmod, rem, rem->GetDexPc());
4676 CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
4677 }
4678 break;
4679 }
4680
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004681 case DataType::Type::kInt64: {
Artem Serov551b28f2016-10-18 19:11:30 +01004682 codegen_->InvokeRuntime(kQuickLmod, rem, rem->GetDexPc());
4683 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
4684 break;
4685 }
4686
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004687 case DataType::Type::kFloat32: {
Artem Serov551b28f2016-10-18 19:11:30 +01004688 codegen_->InvokeRuntime(kQuickFmodf, rem, rem->GetDexPc());
4689 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4690 break;
4691 }
4692
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004693 case DataType::Type::kFloat64: {
Artem Serov551b28f2016-10-18 19:11:30 +01004694 codegen_->InvokeRuntime(kQuickFmod, rem, rem->GetDexPc());
4695 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4696 break;
4697 }
4698
4699 default:
4700 LOG(FATAL) << "Unexpected rem type " << type;
4701 }
4702}
4703
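// The Sdiv/Mls pair in the kInt32 path above relies on the identity
// rem = dividend - (dividend / divisor) * divisor. A minimal C++ sketch
// (illustration only; the helper name is hypothetical):
//
//   int32_t RemInt32Sketch(int32_t dividend, int32_t divisor) {
//     int32_t quotient = dividend / divisor;  // Sdiv(temp, reg1, reg2)
//     return dividend - quotient * divisor;   // Mls(out_reg, temp, reg2, reg1)
//   }
//
// E.g. -7 % 3: the quotient truncates toward zero to -2, so the remainder is
// -7 - (-2 * 3) = -1, matching Java semantics where the remainder takes the
// sign of the dividend.
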
Aart Bik1f8d51b2018-02-15 10:42:37 -08004704static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
4705 LocationSummary* locations = new (allocator) LocationSummary(minmax);
4706 switch (minmax->GetResultType()) {
4707 case DataType::Type::kInt32:
4708 locations->SetInAt(0, Location::RequiresRegister());
4709 locations->SetInAt(1, Location::RequiresRegister());
4710 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4711 break;
4712 case DataType::Type::kInt64:
4713 locations->SetInAt(0, Location::RequiresRegister());
4714 locations->SetInAt(1, Location::RequiresRegister());
4715 locations->SetOut(Location::SameAsFirstInput());
4716 break;
4717 case DataType::Type::kFloat32:
4718 locations->SetInAt(0, Location::RequiresFpuRegister());
4719 locations->SetInAt(1, Location::RequiresFpuRegister());
4720 locations->SetOut(Location::SameAsFirstInput());
4721 locations->AddTemp(Location::RequiresRegister());
4722 break;
4723 case DataType::Type::kFloat64:
4724 locations->SetInAt(0, Location::RequiresFpuRegister());
4725 locations->SetInAt(1, Location::RequiresFpuRegister());
4726 locations->SetOut(Location::SameAsFirstInput());
4727 break;
4728 default:
4729 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
4730 }
4731}
4732
Aart Bik351df3e2018-03-07 11:54:57 -08004733void InstructionCodeGeneratorARMVIXL::GenerateMinMaxInt(LocationSummary* locations, bool is_min) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08004734 Location op1_loc = locations->InAt(0);
4735 Location op2_loc = locations->InAt(1);
4736 Location out_loc = locations->Out();
4737
4738 vixl32::Register op1 = RegisterFrom(op1_loc);
4739 vixl32::Register op2 = RegisterFrom(op2_loc);
4740 vixl32::Register out = RegisterFrom(out_loc);
4741
4742 __ Cmp(op1, op2);
4743
4744 {
4745 ExactAssemblyScope aas(GetVIXLAssembler(),
4746 3 * kMaxInstructionSizeInBytes,
4747 CodeBufferCheckScope::kMaximumSize);
4748
4749 __ ite(is_min ? lt : gt);
4750 __ mov(is_min ? lt : gt, out, op1);
4751 __ mov(is_min ? ge : le, out, op2);
4752 }
4753}
4754
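// The Cmp plus IT block above is a branch-free conditional select. What it
// computes, as a plain C++ sketch (illustration only):
//
//   int32_t MinMaxIntSketch(int32_t op1, int32_t op2, bool is_min) {
//     if (is_min) {
//       return (op1 < op2) ? op1 : op2;  // ite lt; movlt out, op1; movge out, op2
//     }
//     return (op1 > op2) ? op1 : op2;    // ite gt; movgt out, op1; movle out, op2
//   }
//
// The ExactAssemblyScope reserves space for exactly those instructions, so the
// assembler cannot emit a literal pool in the middle of the IT block.
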
4755void InstructionCodeGeneratorARMVIXL::GenerateMinMaxLong(LocationSummary* locations, bool is_min) {
4756 Location op1_loc = locations->InAt(0);
4757 Location op2_loc = locations->InAt(1);
4758 Location out_loc = locations->Out();
4759
4760 // Optimization: don't generate any code if inputs are the same.
4761 if (op1_loc.Equals(op2_loc)) {
4762 DCHECK(out_loc.Equals(op1_loc)); // out_loc is set as SameAsFirstInput() in location builder.
4763 return;
4764 }
4765
4766 vixl32::Register op1_lo = LowRegisterFrom(op1_loc);
4767 vixl32::Register op1_hi = HighRegisterFrom(op1_loc);
4768 vixl32::Register op2_lo = LowRegisterFrom(op2_loc);
4769 vixl32::Register op2_hi = HighRegisterFrom(op2_loc);
4770 vixl32::Register out_lo = LowRegisterFrom(out_loc);
4771 vixl32::Register out_hi = HighRegisterFrom(out_loc);
4772 UseScratchRegisterScope temps(GetVIXLAssembler());
4773 const vixl32::Register temp = temps.Acquire();
4774
4775 DCHECK(op1_lo.Is(out_lo));
4776 DCHECK(op1_hi.Is(out_hi));
4777
4778 // Compare op1 >= op2, or op1 < op2.
4779 __ Cmp(out_lo, op2_lo);
4780 __ Sbcs(temp, out_hi, op2_hi);
4781
4782 // Now GE/LT condition code is correct for the long comparison.
4783 {
4784 vixl32::ConditionType cond = is_min ? ge : lt;
4785 ExactAssemblyScope it_scope(GetVIXLAssembler(),
4786 3 * kMaxInstructionSizeInBytes,
4787 CodeBufferCheckScope::kMaximumSize);
4788 __ itt(cond);
4789 __ mov(cond, out_lo, op2_lo);
4790 __ mov(cond, out_hi, op2_hi);
4791 }
4792}
4793
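// The Cmp/Sbcs pair above is a branch-free 64-bit signed comparison: Cmp
// subtracts the low words and sets the borrow in the carry flag, and Sbcs
// subtracts the high words with that borrow, discarding the difference but
// leaving N and V such that GE/LT hold for the full 64-bit values. A C++
// sketch of the select that follows (illustration only):
//
//   int64_t MinMaxLongSketch(int64_t op1, int64_t op2, bool is_min) {
//     bool ge = op1 >= op2;               // established by Cmp + Sbcs
//     bool take_op2 = is_min ? ge : !ge;  // itt ge ... / itt lt ...
//     return take_op2 ? op2 : op1;        // out already holds op1
//   }
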
Aart Bik351df3e2018-03-07 11:54:57 -08004794void InstructionCodeGeneratorARMVIXL::GenerateMinMaxFloat(HInstruction* minmax, bool is_min) {
4795 LocationSummary* locations = minmax->GetLocations();
Aart Bik1f8d51b2018-02-15 10:42:37 -08004796 Location op1_loc = locations->InAt(0);
4797 Location op2_loc = locations->InAt(1);
4798 Location out_loc = locations->Out();
4799
4800 // Optimization: don't generate any code if inputs are the same.
4801 if (op1_loc.Equals(op2_loc)) {
4802 DCHECK(out_loc.Equals(op1_loc)); // out_loc is set as SameAsFirstInput() in location builder.
4803 return;
4804 }
4805
4806 vixl32::SRegister op1 = SRegisterFrom(op1_loc);
4807 vixl32::SRegister op2 = SRegisterFrom(op2_loc);
4808 vixl32::SRegister out = SRegisterFrom(out_loc);
4809
4810 UseScratchRegisterScope temps(GetVIXLAssembler());
4811 const vixl32::Register temp1 = temps.Acquire();
4812 vixl32::Register temp2 = RegisterFrom(locations->GetTemp(0));
4813 vixl32::Label nan, done;
Aart Bik351df3e2018-03-07 11:54:57 -08004814 vixl32::Label* final_label = codegen_->GetFinalLabel(minmax, &done);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004815
4816 DCHECK(op1.Is(out));
4817
4818 __ Vcmp(op1, op2);
4819 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
Andreas Gampe3db70682018-12-26 15:12:03 -08004820 __ B(vs, &nan, /* is_far_target= */ false); // if unordered, go to NaN handling.
Aart Bik1f8d51b2018-02-15 10:42:37 -08004821
4822 // op1 <> op2
4823 vixl32::ConditionType cond = is_min ? gt : lt;
4824 {
4825 ExactAssemblyScope it_scope(GetVIXLAssembler(),
4826 2 * kMaxInstructionSizeInBytes,
4827 CodeBufferCheckScope::kMaximumSize);
4828 __ it(cond);
4829 __ vmov(cond, F32, out, op2);
4830 }
4831 // For <> (not equal), the min/max calculation is already done.
Andreas Gampe3db70682018-12-26 15:12:03 -08004832 __ B(ne, final_label, /* is_far_target= */ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004833
4834 // handle op1 == op2, max(+0.0,-0.0), min(+0.0,-0.0).
4835 __ Vmov(temp1, op1);
4836 __ Vmov(temp2, op2);
4837 if (is_min) {
4838 __ Orr(temp1, temp1, temp2);
4839 } else {
4840 __ And(temp1, temp1, temp2);
4841 }
4842 __ Vmov(out, temp1);
4843 __ B(final_label);
4844
4845 // handle NaN input.
4846 __ Bind(&nan);
4847 __ Movt(temp1, High16Bits(kNanFloat)); // 0x7FC0xxxx is a NaN.
4848 __ Vmov(out, temp1);
4849
4850 if (done.IsReferenced()) {
4851 __ Bind(&done);
4852 }
4853}
4854
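// The integer Orr/And on the raw bits above resolves the signed-zero cases an
// FP compare cannot distinguish: +0.0f is 0x00000000 and -0.0f is 0x80000000,
// so OR-ing the patterns yields -0.0f (the correct min) and AND-ing them
// yields +0.0f (the correct max); for equal non-zero inputs both are the
// identity. A C++ sketch of the min case (illustration only), assuming
// <cstdint> and <cstring>:
//
//   float MinOfEqualFloatsSketch(float op1, float op2) {  // op1 == op2 here
//     uint32_t bits1, bits2;
//     std::memcpy(&bits1, &op1, sizeof(bits1));  // Vmov(temp1, op1)
//     std::memcpy(&bits2, &op2, sizeof(bits2));  // Vmov(temp2, op2)
//     uint32_t merged = bits1 | bits2;           // Orr; use & for max
//     float out;
//     std::memcpy(&out, &merged, sizeof(out));   // Vmov(out, temp1)
//     return out;
//   }
//
// GenerateMinMaxDouble below applies the same trick to the F64 path, using
// Vand/Vorr directly on the D registers.
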
Aart Bik351df3e2018-03-07 11:54:57 -08004855void InstructionCodeGeneratorARMVIXL::GenerateMinMaxDouble(HInstruction* minmax, bool is_min) {
4856 LocationSummary* locations = minmax->GetLocations();
Aart Bik1f8d51b2018-02-15 10:42:37 -08004857 Location op1_loc = locations->InAt(0);
4858 Location op2_loc = locations->InAt(1);
4859 Location out_loc = locations->Out();
4860
4861 // Optimization: don't generate any code if inputs are the same.
4862 if (op1_loc.Equals(op2_loc)) {
4863 DCHECK(out_loc.Equals(op1_loc)); // out_loc is set as SameAsFirstInput() in location builder.
4864 return;
4865 }
4866
4867 vixl32::DRegister op1 = DRegisterFrom(op1_loc);
4868 vixl32::DRegister op2 = DRegisterFrom(op2_loc);
4869 vixl32::DRegister out = DRegisterFrom(out_loc);
4870 vixl32::Label handle_nan_eq, done;
Aart Bik351df3e2018-03-07 11:54:57 -08004871 vixl32::Label* final_label = codegen_->GetFinalLabel(minmax, &done);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004872
4873 DCHECK(op1.Is(out));
4874
4875 __ Vcmp(op1, op2);
4876 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
Andreas Gampe3db70682018-12-26 15:12:03 -08004877 __ B(vs, &handle_nan_eq, /* is_far_target= */ false); // if unordered, go to NaN handling.
Aart Bik1f8d51b2018-02-15 10:42:37 -08004878
4879 // op1 <> op2
4880 vixl32::ConditionType cond = is_min ? gt : lt;
4881 {
4882 ExactAssemblyScope it_scope(GetVIXLAssembler(),
4883 2 * kMaxInstructionSizeInBytes,
4884 CodeBufferCheckScope::kMaximumSize);
4885 __ it(cond);
4886 __ vmov(cond, F64, out, op2);
4887 }
4888 // For <> (not equal), the min/max calculation is already done.
Andreas Gampe3db70682018-12-26 15:12:03 -08004889 __ B(ne, final_label, /* is_far_target= */ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004890
4891 // handle op1 == op2, max(+0.0,-0.0).
4892 if (!is_min) {
4893 __ Vand(F64, out, op1, op2);
4894 __ B(final_label);
4895 }
4896
4897 // handle op1 == op2, min(+0.0,-0.0), NaN input.
4898 __ Bind(&handle_nan_eq);
4899 __ Vorr(F64, out, op1, op2); // assemble op1/-0.0/NaN.
4900
4901 if (done.IsReferenced()) {
4902 __ Bind(&done);
4903 }
4904}
4905
Aart Bik351df3e2018-03-07 11:54:57 -08004906void InstructionCodeGeneratorARMVIXL::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4907 DataType::Type type = minmax->GetResultType();
4908 switch (type) {
4909 case DataType::Type::kInt32:
4910 GenerateMinMaxInt(minmax->GetLocations(), is_min);
4911 break;
4912 case DataType::Type::kInt64:
4913 GenerateMinMaxLong(minmax->GetLocations(), is_min);
4914 break;
4915 case DataType::Type::kFloat32:
4916 GenerateMinMaxFloat(minmax, is_min);
4917 break;
4918 case DataType::Type::kFloat64:
4919 GenerateMinMaxDouble(minmax, is_min);
4920 break;
4921 default:
4922 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4923 }
4924}
4925
Aart Bik1f8d51b2018-02-15 10:42:37 -08004926void LocationsBuilderARMVIXL::VisitMin(HMin* min) {
4927 CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
4928}
4929
4930void InstructionCodeGeneratorARMVIXL::VisitMin(HMin* min) {
Aart Bik351df3e2018-03-07 11:54:57 -08004931 GenerateMinMax(min, /*is_min*/ true);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004932}
4933
4934void LocationsBuilderARMVIXL::VisitMax(HMax* max) {
4935 CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
4936}
4937
4938void InstructionCodeGeneratorARMVIXL::VisitMax(HMax* max) {
Aart Bik351df3e2018-03-07 11:54:57 -08004939 GenerateMinMax(max, /*is_min*/ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004940}
4941
Aart Bik3dad3412018-02-28 12:01:46 -08004942void LocationsBuilderARMVIXL::VisitAbs(HAbs* abs) {
4943 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4944 switch (abs->GetResultType()) {
4945 case DataType::Type::kInt32:
4946 case DataType::Type::kInt64:
4947 locations->SetInAt(0, Location::RequiresRegister());
4948 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4949 locations->AddTemp(Location::RequiresRegister());
4950 break;
4951 case DataType::Type::kFloat32:
4952 case DataType::Type::kFloat64:
4953 locations->SetInAt(0, Location::RequiresFpuRegister());
4954 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4955 break;
4956 default:
4957 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
4958 }
4959}
4960
4961void InstructionCodeGeneratorARMVIXL::VisitAbs(HAbs* abs) {
4962 LocationSummary* locations = abs->GetLocations();
4963 switch (abs->GetResultType()) {
4964 case DataType::Type::kInt32: {
4965 vixl32::Register in_reg = RegisterFrom(locations->InAt(0));
4966 vixl32::Register out_reg = RegisterFrom(locations->Out());
4967 vixl32::Register mask = RegisterFrom(locations->GetTemp(0));
4968 __ Asr(mask, in_reg, 31);
4969 __ Add(out_reg, in_reg, mask);
4970 __ Eor(out_reg, out_reg, mask);
4971 break;
4972 }
4973 case DataType::Type::kInt64: {
4974 Location in = locations->InAt(0);
4975 vixl32::Register in_reg_lo = LowRegisterFrom(in);
4976 vixl32::Register in_reg_hi = HighRegisterFrom(in);
4977 Location output = locations->Out();
4978 vixl32::Register out_reg_lo = LowRegisterFrom(output);
4979 vixl32::Register out_reg_hi = HighRegisterFrom(output);
4980 DCHECK(!out_reg_lo.Is(in_reg_hi)) << "Diagonal overlap unexpected.";
4981 vixl32::Register mask = RegisterFrom(locations->GetTemp(0));
4982 __ Asr(mask, in_reg_hi, 31);
4983 __ Adds(out_reg_lo, in_reg_lo, mask);
4984 __ Adc(out_reg_hi, in_reg_hi, mask);
4985 __ Eor(out_reg_lo, out_reg_lo, mask);
4986 __ Eor(out_reg_hi, out_reg_hi, mask);
4987 break;
4988 }
4989 case DataType::Type::kFloat32:
4990 case DataType::Type::kFloat64:
4991 __ Vabs(OutputVRegister(abs), InputVRegisterAt(abs, 0));
4992 break;
4993 default:
4994 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
4995 }
4996}
Artem Serov551b28f2016-10-18 19:11:30 +01004997
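// The kInt32 case above is the classic branchless absolute value: the
// arithmetic shift yields mask = (in < 0) ? -1 : 0, and (in + mask) ^ mask
// negates negative inputs while leaving non-negative ones unchanged. A C++
// sketch (illustration only), assuming the target's arithmetic right shift
// and wrapping addition:
//
//   int32_t AbsInt32Sketch(int32_t in) {
//     int32_t mask = in >> 31;     // Asr(mask, in_reg, 31)
//     return (in + mask) ^ mask;   // Add + Eor
//   }
//
// E.g. in = -5: mask = -1 and (-5 + -1) ^ -1 = ~(-6) = 5. The kInt64 case is
// the same identity over a register pair, with Adds/Adc propagating the carry
// from the low word into the high word. As with Java's Math.abs, the most
// negative value maps to itself.
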
Scott Wakelingfe885462016-09-22 10:24:38 +01004998void LocationsBuilderARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Artem Serov657022c2016-11-23 14:19:38 +00004999 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Scott Wakelingfe885462016-09-22 10:24:38 +01005000 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Scott Wakelingfe885462016-09-22 10:24:38 +01005001}
5002
5003void InstructionCodeGeneratorARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
5004 DivZeroCheckSlowPathARMVIXL* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01005005 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARMVIXL(instruction);
Scott Wakelingfe885462016-09-22 10:24:38 +01005006 codegen_->AddSlowPath(slow_path);
5007
5008 LocationSummary* locations = instruction->GetLocations();
5009 Location value = locations->InAt(0);
5010
5011 switch (instruction->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005012 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005013 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005014 case DataType::Type::kInt8:
5015 case DataType::Type::kUint16:
5016 case DataType::Type::kInt16:
5017 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01005018 if (value.IsRegister()) {
xueliang.zhongf51bc622016-11-04 09:23:32 +00005019 __ CompareAndBranchIfZero(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
Scott Wakelingfe885462016-09-22 10:24:38 +01005020 } else {
5021 DCHECK(value.IsConstant()) << value;
Anton Kirilov644032c2016-12-06 17:51:43 +00005022 if (Int32ConstantFrom(value) == 0) {
Scott Wakelingfe885462016-09-22 10:24:38 +01005023 __ B(slow_path->GetEntryLabel());
5024 }
5025 }
5026 break;
5027 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005028 case DataType::Type::kInt64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01005029 if (value.IsRegisterPair()) {
5030 UseScratchRegisterScope temps(GetVIXLAssembler());
5031 vixl32::Register temp = temps.Acquire();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005032 __ Orrs(temp, LowRegisterFrom(value), HighRegisterFrom(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01005033 __ B(eq, slow_path->GetEntryLabel());
5034 } else {
5035 DCHECK(value.IsConstant()) << value;
Anton Kirilov644032c2016-12-06 17:51:43 +00005036 if (Int64ConstantFrom(value) == 0) {
Scott Wakelingfe885462016-09-22 10:24:38 +01005037 __ B(slow_path->GetEntryLabel());
5038 }
5039 }
5040 break;
5041 }
5042 default:
5043 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
5044 }
5045}
5046
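// For kInt64 the zero test above ORs the two halves instead of comparing them
// separately: Orrs sets the Z flag exactly when both words are zero, so a
// single flag-setting instruction replaces two compares. Sketch (illustration
// only):
//
//   bool IsZeroInt64Sketch(uint32_t lo, uint32_t hi) {
//     return (lo | hi) == 0;  // Orrs(temp, lo, hi); B(eq, slow_path)
//   }
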
Artem Serov02109dd2016-09-23 17:17:54 +01005047void InstructionCodeGeneratorARMVIXL::HandleIntegerRotate(HRor* ror) {
5048 LocationSummary* locations = ror->GetLocations();
5049 vixl32::Register in = InputRegisterAt(ror, 0);
5050 Location rhs = locations->InAt(1);
5051 vixl32::Register out = OutputRegister(ror);
5052
5053 if (rhs.IsConstant()) {
5054 // Arm32 and Thumb2 assemblers require a rotation on the interval [1,31],
5055 // so map all rotations to a positive equivalent in that range.
5056 // (e.g. left *or* right by -2 bits == 30 bits in the same direction.)
5057 uint32_t rot = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()) & 0x1F;
5058 if (rot) {
5059 // Rotate, mapping left rotations to right equivalents if necessary.
5060 // (e.g. left by 2 bits == right by 30.)
5061 __ Ror(out, in, rot);
5062 } else if (!out.Is(in)) {
5063 __ Mov(out, in);
5064 }
5065 } else {
5066 __ Ror(out, in, RegisterFrom(rhs));
5067 }
5068}
5069
5070// Gain some speed by mapping all Long rotates onto equivalent pairs of Integer
5071// rotates by swapping input regs (effectively rotating by the first 32-bits of
5072// a larger rotation) or flipping direction (thus treating larger right/left
5073// rotations as sub-word sized rotations in the other direction) as appropriate.
5074void InstructionCodeGeneratorARMVIXL::HandleLongRotate(HRor* ror) {
5075 LocationSummary* locations = ror->GetLocations();
5076 vixl32::Register in_reg_lo = LowRegisterFrom(locations->InAt(0));
5077 vixl32::Register in_reg_hi = HighRegisterFrom(locations->InAt(0));
5078 Location rhs = locations->InAt(1);
5079 vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
5080 vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());
5081
5082 if (rhs.IsConstant()) {
5083 uint64_t rot = CodeGenerator::GetInt64ValueOf(rhs.GetConstant());
5084 // Map all rotations to positive equivalents on the interval [0,63].
5085 rot &= kMaxLongShiftDistance;
5086 // For rotates over a word in size, 'pre-rotate' by 32-bits to keep rotate
5087 // logic below to a simple pair of binary orr.
5088 // (e.g. 34 bits == in_reg swap + 2 bits right.)
5089 if (rot >= kArmBitsPerWord) {
5090 rot -= kArmBitsPerWord;
5091 std::swap(in_reg_hi, in_reg_lo);
5092 }
5093 // Rotate, or mov to out for zero or word size rotations.
5094 if (rot != 0u) {
Scott Wakelingb77051e2016-11-21 19:46:00 +00005095 __ Lsr(out_reg_hi, in_reg_hi, Operand::From(rot));
Artem Serov02109dd2016-09-23 17:17:54 +01005096 __ Orr(out_reg_hi, out_reg_hi, Operand(in_reg_lo, ShiftType::LSL, kArmBitsPerWord - rot));
Scott Wakelingb77051e2016-11-21 19:46:00 +00005097 __ Lsr(out_reg_lo, in_reg_lo, Operand::From(rot));
Artem Serov02109dd2016-09-23 17:17:54 +01005098 __ Orr(out_reg_lo, out_reg_lo, Operand(in_reg_hi, ShiftType::LSL, kArmBitsPerWord - rot));
5099 } else {
5100 __ Mov(out_reg_lo, in_reg_lo);
5101 __ Mov(out_reg_hi, in_reg_hi);
5102 }
5103 } else {
5104 vixl32::Register shift_right = RegisterFrom(locations->GetTemp(0));
5105 vixl32::Register shift_left = RegisterFrom(locations->GetTemp(1));
5106 vixl32::Label end;
5107 vixl32::Label shift_by_32_plus_shift_right;
Anton Kirilov6f644202017-02-27 18:29:45 +00005108 vixl32::Label* final_label = codegen_->GetFinalLabel(ror, &end);
Artem Serov02109dd2016-09-23 17:17:54 +01005109
5110 __ And(shift_right, RegisterFrom(rhs), 0x1F);
5111 __ Lsrs(shift_left, RegisterFrom(rhs), 6);
Scott Wakelingbffdc702016-12-07 17:46:03 +00005112 __ Rsb(LeaveFlags, shift_left, shift_right, Operand::From(kArmBitsPerWord));
Andreas Gampe3db70682018-12-26 15:12:03 -08005113 __ B(cc, &shift_by_32_plus_shift_right, /* is_far_target= */ false);
Artem Serov02109dd2016-09-23 17:17:54 +01005114
5115 // out_reg_hi = (reg_hi << shift_left) | (reg_lo >> shift_right).
5116 // out_reg_lo = (reg_lo << shift_left) | (reg_hi >> shift_right).
5117 __ Lsl(out_reg_hi, in_reg_hi, shift_left);
5118 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
5119 __ Add(out_reg_hi, out_reg_hi, out_reg_lo);
5120 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
5121 __ Lsr(shift_left, in_reg_hi, shift_right);
5122 __ Add(out_reg_lo, out_reg_lo, shift_left);
Anton Kirilov6f644202017-02-27 18:29:45 +00005123 __ B(final_label);
Artem Serov02109dd2016-09-23 17:17:54 +01005124
5125 __ Bind(&shift_by_32_plus_shift_right); // Shift by 32+shift_right.
5126 // out_reg_hi = (reg_hi >> shift_right) | (reg_lo << shift_left).
5127 // out_reg_lo = (reg_lo >> shift_right) | (reg_hi << shift_left).
5128 __ Lsr(out_reg_hi, in_reg_hi, shift_right);
5129 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
5130 __ Add(out_reg_hi, out_reg_hi, out_reg_lo);
5131 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
5132 __ Lsl(shift_right, in_reg_hi, shift_left);
5133 __ Add(out_reg_lo, out_reg_lo, shift_right);
5134
Anton Kirilov6f644202017-02-27 18:29:45 +00005135 if (end.IsReferenced()) {
5136 __ Bind(&end);
5137 }
Artem Serov02109dd2016-09-23 17:17:54 +01005138 }
5139}
5140
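// A worked example of the constant-rotate decomposition above: a 64-bit rotate
// right by 34 is a swap of the two 32-bit halves (a rotate by exactly 32)
// followed by a rotate right by the remaining 2 bits. A C++ sketch of the
// constant path (illustration only), assuming <cstdint> and <utility>:
//
//   struct WordPair { uint32_t lo; uint32_t hi; };
//   WordPair RorInt64Sketch(WordPair in, uint32_t rot) {  // rot in [0,63]
//     if (rot >= 32) {  // 'Pre-rotate' by one word.
//       std::swap(in.lo, in.hi);
//       rot -= 32;
//     }
//     if (rot == 0) return in;
//     WordPair out;
//     out.hi = (in.hi >> rot) | (in.lo << (32 - rot));  // Lsr + Orr(LSL)
//     out.lo = (in.lo >> rot) | (in.hi << (32 - rot));  // Lsr + Orr(LSL)
//     return out;
//   }
//
// The register case cannot pre-swap, so it selects at run time between the two
// shift/orr sequences based on bit 5 of the rotation amount; the Lsrs by 6 is
// there to move that bit into the carry flag (its register result is
// immediately overwritten by the Rsb).
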
5141void LocationsBuilderARMVIXL::VisitRor(HRor* ror) {
5142 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005143 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Artem Serov02109dd2016-09-23 17:17:54 +01005144 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005145 case DataType::Type::kInt32: {
Artem Serov02109dd2016-09-23 17:17:54 +01005146 locations->SetInAt(0, Location::RequiresRegister());
5147 locations->SetInAt(1, Location::RegisterOrConstant(ror->InputAt(1)));
5148 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5149 break;
5150 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005151 case DataType::Type::kInt64: {
Artem Serov02109dd2016-09-23 17:17:54 +01005152 locations->SetInAt(0, Location::RequiresRegister());
5153 if (ror->InputAt(1)->IsConstant()) {
5154 locations->SetInAt(1, Location::ConstantLocation(ror->InputAt(1)->AsConstant()));
5155 } else {
5156 locations->SetInAt(1, Location::RequiresRegister());
5157 locations->AddTemp(Location::RequiresRegister());
5158 locations->AddTemp(Location::RequiresRegister());
5159 }
5160 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5161 break;
5162 }
5163 default:
5164 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
5165 }
5166}
5167
5168void InstructionCodeGeneratorARMVIXL::VisitRor(HRor* ror) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005169 DataType::Type type = ror->GetResultType();
Artem Serov02109dd2016-09-23 17:17:54 +01005170 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005171 case DataType::Type::kInt32: {
Artem Serov02109dd2016-09-23 17:17:54 +01005172 HandleIntegerRotate(ror);
5173 break;
5174 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005175 case DataType::Type::kInt64: {
Artem Serov02109dd2016-09-23 17:17:54 +01005176 HandleLongRotate(ror);
5177 break;
5178 }
5179 default:
5180 LOG(FATAL) << "Unexpected operation type " << type;
5181 UNREACHABLE();
5182 }
5183}
5184
Artem Serov02d37832016-10-25 15:25:33 +01005185void LocationsBuilderARMVIXL::HandleShift(HBinaryOperation* op) {
5186 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
5187
5188 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005189 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005190
5191 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005192 case DataType::Type::kInt32: {
Artem Serov02d37832016-10-25 15:25:33 +01005193 locations->SetInAt(0, Location::RequiresRegister());
5194 if (op->InputAt(1)->IsConstant()) {
5195 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
5196 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5197 } else {
5198 locations->SetInAt(1, Location::RequiresRegister());
5199 // Make the output overlap, as it will be used to hold the masked
5200 // second input.
5201 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5202 }
5203 break;
5204 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005205 case DataType::Type::kInt64: {
Artem Serov02d37832016-10-25 15:25:33 +01005206 locations->SetInAt(0, Location::RequiresRegister());
5207 if (op->InputAt(1)->IsConstant()) {
5208 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
5209 // For simplicity, use kOutputOverlap even though we only require that low registers
5210 // don't clash with high registers, which the register allocator currently guarantees.
5211 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5212 } else {
5213 locations->SetInAt(1, Location::RequiresRegister());
5214 locations->AddTemp(Location::RequiresRegister());
5215 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5216 }
5217 break;
5218 }
5219 default:
5220 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
5221 }
5222}
5223
5224void InstructionCodeGeneratorARMVIXL::HandleShift(HBinaryOperation* op) {
5225 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
5226
5227 LocationSummary* locations = op->GetLocations();
5228 Location out = locations->Out();
5229 Location first = locations->InAt(0);
5230 Location second = locations->InAt(1);
5231
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005232 DataType::Type type = op->GetResultType();
Artem Serov02d37832016-10-25 15:25:33 +01005233 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005234 case DataType::Type::kInt32: {
Artem Serov02d37832016-10-25 15:25:33 +01005235 vixl32::Register out_reg = OutputRegister(op);
5236 vixl32::Register first_reg = InputRegisterAt(op, 0);
5237 if (second.IsRegister()) {
5238 vixl32::Register second_reg = RegisterFrom(second);
5239 // ARM doesn't mask the shift count so we need to do it ourselves.
5240 __ And(out_reg, second_reg, kMaxIntShiftDistance);
5241 if (op->IsShl()) {
5242 __ Lsl(out_reg, first_reg, out_reg);
5243 } else if (op->IsShr()) {
5244 __ Asr(out_reg, first_reg, out_reg);
5245 } else {
5246 __ Lsr(out_reg, first_reg, out_reg);
5247 }
5248 } else {
Anton Kirilov644032c2016-12-06 17:51:43 +00005249 int32_t cst = Int32ConstantFrom(second);
Artem Serov02d37832016-10-25 15:25:33 +01005250 uint32_t shift_value = cst & kMaxIntShiftDistance;
5251 if (shift_value == 0) { // ARM does not support shifting with 0 immediate.
5252 __ Mov(out_reg, first_reg);
5253 } else if (op->IsShl()) {
5254 __ Lsl(out_reg, first_reg, shift_value);
5255 } else if (op->IsShr()) {
5256 __ Asr(out_reg, first_reg, shift_value);
5257 } else {
5258 __ Lsr(out_reg, first_reg, shift_value);
5259 }
5260 }
5261 break;
5262 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005263 case DataType::Type::kInt64: {
Artem Serov02d37832016-10-25 15:25:33 +01005264 vixl32::Register o_h = HighRegisterFrom(out);
5265 vixl32::Register o_l = LowRegisterFrom(out);
5266
5267 vixl32::Register high = HighRegisterFrom(first);
5268 vixl32::Register low = LowRegisterFrom(first);
5269
5270 if (second.IsRegister()) {
5271 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
5272
5273 vixl32::Register second_reg = RegisterFrom(second);
5274
5275 if (op->IsShl()) {
5276 __ And(o_l, second_reg, kMaxLongShiftDistance);
5277 // Shift the high part
5278 __ Lsl(o_h, high, o_l);
5279 // Shift the low part and `or` what overflowed onto the high part
Scott Wakelingb77051e2016-11-21 19:46:00 +00005280 __ Rsb(temp, o_l, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01005281 __ Lsr(temp, low, temp);
5282 __ Orr(o_h, o_h, temp);
5283 // If the shift is > 32 bits, override the high part
Scott Wakelingb77051e2016-11-21 19:46:00 +00005284 __ Subs(temp, o_l, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01005285 {
Artem Serov0fb37192016-12-06 18:13:40 +00005286 ExactAssemblyScope guard(GetVIXLAssembler(),
5287 2 * vixl32::kMaxInstructionSizeInBytes,
5288 CodeBufferCheckScope::kMaximumSize);
Artem Serov02d37832016-10-25 15:25:33 +01005289 __ it(pl);
5290 __ lsl(pl, o_h, low, temp);
5291 }
5292 // Shift the low part
5293 __ Lsl(o_l, low, o_l);
5294 } else if (op->IsShr()) {
5295 __ And(o_h, second_reg, kMaxLongShiftDistance);
5296 // Shift the low part
5297 __ Lsr(o_l, low, o_h);
5298 // Shift the high part and `or` what underflowed onto the low part
Scott Wakelingb77051e2016-11-21 19:46:00 +00005299 __ Rsb(temp, o_h, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01005300 __ Lsl(temp, high, temp);
5301 __ Orr(o_l, o_l, temp);
5302 // If the shift is > 32 bits, override the low part
Scott Wakelingb77051e2016-11-21 19:46:00 +00005303 __ Subs(temp, o_h, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01005304 {
Artem Serov0fb37192016-12-06 18:13:40 +00005305 ExactAssemblyScope guard(GetVIXLAssembler(),
5306 2 * vixl32::kMaxInstructionSizeInBytes,
5307 CodeBufferCheckScope::kMaximumSize);
Artem Serov02d37832016-10-25 15:25:33 +01005308 __ it(pl);
5309 __ asr(pl, o_l, high, temp);
5310 }
5311 // Shift the high part
5312 __ Asr(o_h, high, o_h);
5313 } else {
5314 __ And(o_h, second_reg, kMaxLongShiftDistance);
5315 // Same as Shr, except we use `Lsr`s and not `Asr`s.
5316 __ Lsr(o_l, low, o_h);
Scott Wakelingb77051e2016-11-21 19:46:00 +00005317 __ Rsb(temp, o_h, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01005318 __ Lsl(temp, high, temp);
5319 __ Orr(o_l, o_l, temp);
Scott Wakelingb77051e2016-11-21 19:46:00 +00005320 __ Subs(temp, o_h, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01005321 {
Artem Serov0fb37192016-12-06 18:13:40 +00005322 ExactAssemblyScope guard(GetVIXLAssembler(),
5323 2 * vixl32::kMaxInstructionSizeInBytes,
5324 CodeBufferCheckScope::kMaximumSize);
Artem Serov02d37832016-10-25 15:25:33 +01005325 __ it(pl);
5326 __ lsr(pl, o_l, high, temp);
5327 }
5328 __ Lsr(o_h, high, o_h);
5329 }
5330 } else {
5331 // Register allocator doesn't create partial overlap.
5332 DCHECK(!o_l.Is(high));
5333 DCHECK(!o_h.Is(low));
Anton Kirilov644032c2016-12-06 17:51:43 +00005334 int32_t cst = Int32ConstantFrom(second);
Artem Serov02d37832016-10-25 15:25:33 +01005335 uint32_t shift_value = cst & kMaxLongShiftDistance;
5336 if (shift_value > 32) {
5337 if (op->IsShl()) {
5338 __ Lsl(o_h, low, shift_value - 32);
5339 __ Mov(o_l, 0);
5340 } else if (op->IsShr()) {
5341 __ Asr(o_l, high, shift_value - 32);
5342 __ Asr(o_h, high, 31);
5343 } else {
5344 __ Lsr(o_l, high, shift_value - 32);
5345 __ Mov(o_h, 0);
5346 }
5347 } else if (shift_value == 32) {
5348 if (op->IsShl()) {
5349 __ Mov(o_h, low);
5350 __ Mov(o_l, 0);
5351 } else if (op->IsShr()) {
5352 __ Mov(o_l, high);
5353 __ Asr(o_h, high, 31);
5354 } else {
5355 __ Mov(o_l, high);
5356 __ Mov(o_h, 0);
5357 }
5358 } else if (shift_value == 1) {
5359 if (op->IsShl()) {
5360 __ Lsls(o_l, low, 1);
5361 __ Adc(o_h, high, high);
5362 } else if (op->IsShr()) {
5363 __ Asrs(o_h, high, 1);
5364 __ Rrx(o_l, low);
5365 } else {
5366 __ Lsrs(o_h, high, 1);
5367 __ Rrx(o_l, low);
5368 }
Nicolas Geoffray9b195cc2019-04-02 08:29:00 +01005369 } else if (shift_value == 0) {
5370 __ Mov(o_l, low);
5371 __ Mov(o_h, high);
Artem Serov02d37832016-10-25 15:25:33 +01005372 } else {
Nicolas Geoffray9b195cc2019-04-02 08:29:00 +01005373 DCHECK(0 < shift_value && shift_value < 32) << shift_value;
Artem Serov02d37832016-10-25 15:25:33 +01005374 if (op->IsShl()) {
5375 __ Lsl(o_h, high, shift_value);
5376 __ Orr(o_h, o_h, Operand(low, ShiftType::LSR, 32 - shift_value));
5377 __ Lsl(o_l, low, shift_value);
5378 } else if (op->IsShr()) {
5379 __ Lsr(o_l, low, shift_value);
5380 __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value));
5381 __ Asr(o_h, high, shift_value);
5382 } else {
5383 __ Lsr(o_l, low, shift_value);
5384 __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value));
5385 __ Lsr(o_h, high, shift_value);
5386 }
5387 }
5388 }
5389 break;
5390 }
5391 default:
5392 LOG(FATAL) << "Unexpected operation type " << type;
5393 UNREACHABLE();
5394 }
5395}
5396
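// The shift_value == 1 special case above leans on the carry flag instead of
// the general word-crossing sequence: for a long shift left by one, Lsls
// shifts the low word and leaves its old bit 31 in the carry, and Adc doubles
// the high word while adding that carry back in. A C++ sketch (illustration
// only):
//
//   void ShlInt64By1Sketch(uint32_t* lo, uint32_t* hi) {
//     uint32_t carry = *lo >> 31;  // The bit crossing the word boundary.
//     *lo = *lo << 1;              // Lsls(o_l, low, 1)
//     *hi = (*hi << 1) + carry;    // Adc(o_h, high, high)
//   }
//
// The Shr/UShr-by-1 cases go the other way: Asrs/Lsrs leaves the old bit 0 of
// the high word in the carry, and Rrx rotates that carry into bit 31 of the
// low word.
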
5397void LocationsBuilderARMVIXL::VisitShl(HShl* shl) {
5398 HandleShift(shl);
5399}
5400
5401void InstructionCodeGeneratorARMVIXL::VisitShl(HShl* shl) {
5402 HandleShift(shl);
5403}
5404
5405void LocationsBuilderARMVIXL::VisitShr(HShr* shr) {
5406 HandleShift(shr);
5407}
5408
5409void InstructionCodeGeneratorARMVIXL::VisitShr(HShr* shr) {
5410 HandleShift(shr);
5411}
5412
5413void LocationsBuilderARMVIXL::VisitUShr(HUShr* ushr) {
5414 HandleShift(ushr);
5415}
5416
5417void InstructionCodeGeneratorARMVIXL::VisitUShr(HUShr* ushr) {
5418 HandleShift(ushr);
5419}
5420
5421void LocationsBuilderARMVIXL::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005422 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5423 instruction, LocationSummary::kCallOnMainOnly);
Alex Lightd109e302018-06-27 10:25:41 -07005424 InvokeRuntimeCallingConventionARMVIXL calling_convention;
5425 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
Artem Serov02d37832016-10-25 15:25:33 +01005426 locations->SetOut(LocationFrom(r0));
5427}
5428
5429void InstructionCodeGeneratorARMVIXL::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07005430 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
5431 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Andra Danciua0130e82020-07-23 12:34:56 +00005432 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 12);
Artem Serov02d37832016-10-25 15:25:33 +01005433}
5434
5435void LocationsBuilderARMVIXL::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005436 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5437 instruction, LocationSummary::kCallOnMainOnly);
Artem Serov02d37832016-10-25 15:25:33 +01005438 InvokeRuntimeCallingConventionARMVIXL calling_convention;
Artem Serov02d37832016-10-25 15:25:33 +01005439 locations->SetOut(LocationFrom(r0));
Nicolas Geoffray8c7c4f12017-01-26 10:13:11 +00005440 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5441 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Artem Serov02d37832016-10-25 15:25:33 +01005442}
5443
5444void InstructionCodeGeneratorARMVIXL::VisitNewArray(HNewArray* instruction) {
Vladimir Markob5461632018-10-15 14:24:21 +01005445 // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
5446 QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
Artem Serov7b3672e2017-02-03 17:30:34 +00005447 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005448 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Artem Serov7b3672e2017-02-03 17:30:34 +00005449 DCHECK(!codegen_->IsLeafMethod());
Andra Danciua0130e82020-07-23 12:34:56 +00005450 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 13);
Artem Serov02d37832016-10-25 15:25:33 +01005451}
5452
5453void LocationsBuilderARMVIXL::VisitParameterValue(HParameterValue* instruction) {
5454 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005455 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005456 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5457 if (location.IsStackSlot()) {
5458 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5459 } else if (location.IsDoubleStackSlot()) {
5460 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5461 }
5462 locations->SetOut(location);
5463}
5464
5465void InstructionCodeGeneratorARMVIXL::VisitParameterValue(
5466 HParameterValue* instruction ATTRIBUTE_UNUSED) {
5467 // Nothing to do, the parameter is already at its location.
5468}
5469
5470void LocationsBuilderARMVIXL::VisitCurrentMethod(HCurrentMethod* instruction) {
5471 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005472 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005473 locations->SetOut(LocationFrom(kMethodRegister));
5474}
5475
5476void InstructionCodeGeneratorARMVIXL::VisitCurrentMethod(
5477 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5478 // Nothing to do, the method is already at its location.
5479}
5480
5481void LocationsBuilderARMVIXL::VisitNot(HNot* not_) {
5482 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005483 new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005484 locations->SetInAt(0, Location::RequiresRegister());
5485 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5486}
5487
5488void InstructionCodeGeneratorARMVIXL::VisitNot(HNot* not_) {
5489 LocationSummary* locations = not_->GetLocations();
5490 Location out = locations->Out();
5491 Location in = locations->InAt(0);
5492 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005493 case DataType::Type::kInt32:
Artem Serov02d37832016-10-25 15:25:33 +01005494 __ Mvn(OutputRegister(not_), InputRegisterAt(not_, 0));
5495 break;
5496
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005497 case DataType::Type::kInt64:
Artem Serov02d37832016-10-25 15:25:33 +01005498 __ Mvn(LowRegisterFrom(out), LowRegisterFrom(in));
5499 __ Mvn(HighRegisterFrom(out), HighRegisterFrom(in));
5500 break;
5501
5502 default:
5503 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
5504 }
5505}
5506
Scott Wakelingc34dba72016-10-03 10:14:44 +01005507void LocationsBuilderARMVIXL::VisitBooleanNot(HBooleanNot* bool_not) {
5508 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005509 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
Scott Wakelingc34dba72016-10-03 10:14:44 +01005510 locations->SetInAt(0, Location::RequiresRegister());
5511 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5512}
5513
5514void InstructionCodeGeneratorARMVIXL::VisitBooleanNot(HBooleanNot* bool_not) {
5515 __ Eor(OutputRegister(bool_not), InputRegister(bool_not), 1);
5516}
5517
Artem Serov02d37832016-10-25 15:25:33 +01005518void LocationsBuilderARMVIXL::VisitCompare(HCompare* compare) {
5519 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005520 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005521 switch (compare->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005522 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005523 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005524 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005525 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005526 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005527 case DataType::Type::kInt32:
5528 case DataType::Type::kInt64: {
Artem Serov02d37832016-10-25 15:25:33 +01005529 locations->SetInAt(0, Location::RequiresRegister());
5530 locations->SetInAt(1, Location::RequiresRegister());
5531 // Output overlaps because it is written before doing the low comparison.
5532 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5533 break;
5534 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005535 case DataType::Type::kFloat32:
5536 case DataType::Type::kFloat64: {
Artem Serov02d37832016-10-25 15:25:33 +01005537 locations->SetInAt(0, Location::RequiresFpuRegister());
5538 locations->SetInAt(1, ArithmeticZeroOrFpuRegister(compare->InputAt(1)));
5539 locations->SetOut(Location::RequiresRegister());
5540 break;
5541 }
5542 default:
5543 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
5544 }
5545}
5546
5547void InstructionCodeGeneratorARMVIXL::VisitCompare(HCompare* compare) {
5548 LocationSummary* locations = compare->GetLocations();
5549 vixl32::Register out = OutputRegister(compare);
5550 Location left = locations->InAt(0);
5551 Location right = locations->InAt(1);
5552
5553 vixl32::Label less, greater, done;
Anton Kirilov6f644202017-02-27 18:29:45 +00005554 vixl32::Label* final_label = codegen_->GetFinalLabel(compare, &done);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005555 DataType::Type type = compare->InputAt(0)->GetType();
Vladimir Marko33bff252017-11-01 14:35:42 +00005556 vixl32::Condition less_cond = vixl32::Condition::None();
Artem Serov02d37832016-10-25 15:25:33 +01005557 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005558 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005559 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005560 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005561 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005562 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005563 case DataType::Type::kInt32: {
Artem Serov02d37832016-10-25 15:25:33 +01005564 // Emit move to `out` before the `Cmp`, as `Mov` might affect the status flags.
5565 __ Mov(out, 0);
5566 __ Cmp(RegisterFrom(left), RegisterFrom(right)); // Signed compare.
5567 less_cond = lt;
5568 break;
5569 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005570 case DataType::Type::kInt64: {
Artem Serov02d37832016-10-25 15:25:33 +01005571 __ Cmp(HighRegisterFrom(left), HighRegisterFrom(right)); // Signed compare.
Andreas Gampe3db70682018-12-26 15:12:03 -08005572 __ B(lt, &less, /* is_far_target= */ false);
5573 __ B(gt, &greater, /* is_far_target= */ false);
Artem Serov02d37832016-10-25 15:25:33 +01005574 // Emit move to `out` before the last `Cmp`, as `Mov` might affect the status flags.
5575 __ Mov(out, 0);
5576 __ Cmp(LowRegisterFrom(left), LowRegisterFrom(right)); // Unsigned compare.
5577 less_cond = lo;
5578 break;
5579 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005580 case DataType::Type::kFloat32:
5581 case DataType::Type::kFloat64: {
Artem Serov02d37832016-10-25 15:25:33 +01005582 __ Mov(out, 0);
Donghui Bai426b49c2016-11-08 14:55:38 +08005583 GenerateVcmp(compare, codegen_);
Artem Serov02d37832016-10-25 15:25:33 +01005584 // To branch on the FP compare result we transfer FPSCR to APSR (encoded as PC in VMRS).
5585 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
5586 less_cond = ARMFPCondition(kCondLT, compare->IsGtBias());
5587 break;
5588 }
5589 default:
5590 LOG(FATAL) << "Unexpected compare type " << type;
5591 UNREACHABLE();
5592 }
5593
Andreas Gampe3db70682018-12-26 15:12:03 -08005594 __ B(eq, final_label, /* is_far_target= */ false);
5595 __ B(less_cond, &less, /* is_far_target= */ false);
Artem Serov02d37832016-10-25 15:25:33 +01005596
5597 __ Bind(&greater);
5598 __ Mov(out, 1);
Anton Kirilov6f644202017-02-27 18:29:45 +00005599 __ B(final_label);
Artem Serov02d37832016-10-25 15:25:33 +01005600
5601 __ Bind(&less);
5602 __ Mov(out, -1);
5603
Anton Kirilov6f644202017-02-27 18:29:45 +00005604 if (done.IsReferenced()) {
5605 __ Bind(&done);
5606 }
Artem Serov02d37832016-10-25 15:25:33 +01005607}
5608
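// The branches above implement the Java-style three-way compare, producing
// -1, 0, or 1. A C++ sketch of the kFloat32 case (illustration only):
//
//   int32_t CompareFloatSketch(float left, float right, bool gt_bias) {
//     if (left == right) return 0;
//     if (left < right) return -1;
//     if (left > right) return 1;
//     return gt_bias ? 1 : -1;  // Unordered (NaN input): the bias decides.
//   }
//
// ARMFPCondition(kCondLT, IsGtBias()) picks an FP condition that includes or
// excludes the unordered case, so a NaN input lands on the biased side
// without an extra test.
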
5609void LocationsBuilderARMVIXL::VisitPhi(HPhi* instruction) {
5610 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005611 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005612 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
5613 locations->SetInAt(i, Location::Any());
5614 }
5615 locations->SetOut(Location::Any());
5616}
5617
5618void InstructionCodeGeneratorARMVIXL::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
5619 LOG(FATAL) << "Unreachable";
5620}
5621
5622void CodeGeneratorARMVIXL::GenerateMemoryBarrier(MemBarrierKind kind) {
5623 // TODO (ported from quick): revisit ARM barrier kinds.
5624 DmbOptions flavor = DmbOptions::ISH; // Quiet C++ warnings.
5625 switch (kind) {
5626 case MemBarrierKind::kAnyStore:
5627 case MemBarrierKind::kLoadAny:
5628 case MemBarrierKind::kAnyAny: {
5629 flavor = DmbOptions::ISH;
5630 break;
5631 }
5632 case MemBarrierKind::kStoreStore: {
5633 flavor = DmbOptions::ISHST;
5634 break;
5635 }
5636 default:
5637 LOG(FATAL) << "Unexpected memory barrier " << kind;
5638 }
5639 __ Dmb(flavor);
5640}
5641
5642void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicLoad(vixl32::Register addr,
5643 uint32_t offset,
5644 vixl32::Register out_lo,
5645 vixl32::Register out_hi) {
5646 UseScratchRegisterScope temps(GetVIXLAssembler());
5647 if (offset != 0) {
5648 vixl32::Register temp = temps.Acquire();
5649 __ Add(temp, addr, offset);
5650 addr = temp;
5651 }
Scott Wakelingb77051e2016-11-21 19:46:00 +00005652 __ Ldrexd(out_lo, out_hi, MemOperand(addr));
Artem Serov02d37832016-10-25 15:25:33 +01005653}
5654
5655void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicStore(vixl32::Register addr,
5656 uint32_t offset,
5657 vixl32::Register value_lo,
5658 vixl32::Register value_hi,
5659 vixl32::Register temp1,
5660 vixl32::Register temp2,
5661 HInstruction* instruction) {
5662 UseScratchRegisterScope temps(GetVIXLAssembler());
5663 vixl32::Label fail;
5664 if (offset != 0) {
5665 vixl32::Register temp = temps.Acquire();
5666 __ Add(temp, addr, offset);
5667 addr = temp;
5668 }
5669 __ Bind(&fail);
Alexandre Rames374ddf32016-11-04 10:40:49 +00005670 {
5671 // Ensure the pc position is recorded immediately after the `ldrexd` instruction.
Artem Serov0fb37192016-12-06 18:13:40 +00005672 ExactAssemblyScope aas(GetVIXLAssembler(),
5673 vixl32::kMaxInstructionSizeInBytes,
5674 CodeBufferCheckScope::kMaximumSize);
Alexandre Rames374ddf32016-11-04 10:40:49 +00005675 // We need a load followed by store. (The address used in a STREX instruction must
5676 // be the same as the address in the most recently executed LDREX instruction.)
5677 __ ldrexd(temp1, temp2, MemOperand(addr));
5678 codegen_->MaybeRecordImplicitNullCheck(instruction);
5679 }
Scott Wakelingb77051e2016-11-21 19:46:00 +00005680 __ Strexd(temp1, value_lo, value_hi, MemOperand(addr));
xueliang.zhongf51bc622016-11-04 09:23:32 +00005681 __ CompareAndBranchIfNonZero(temp1, &fail);
Artem Serov02d37832016-10-25 15:25:33 +01005682}
Artem Serov02109dd2016-09-23 17:17:54 +01005683
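// GenerateWideAtomicStore above is a load-linked/store-conditional retry loop:
// ldrexd marks the address for exclusive access (the loaded pair is
// discarded), strexd writes the new pair only if the exclusive monitor is
// still held (returning 0 in its status register on success), and the loop
// retries otherwise. One way to express the same store in portable C++11
// (illustration only; the memory barriers are emitted separately by callers):
//
//   #include <atomic>
//   #include <cstdint>
//   void WideAtomicStoreSketch(std::atomic<uint64_t>* addr, uint64_t value) {
//     uint64_t observed = addr->load(std::memory_order_relaxed);  // ldrexd
//     while (!addr->compare_exchange_weak(observed, value,
//                                         std::memory_order_relaxed)) {
//       // strexd reported failure: another agent touched the location; retry.
//     }
//   }
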
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005684void LocationsBuilderARMVIXL::HandleFieldSet(
5685 HInstruction* instruction, const FieldInfo& field_info) {
5686 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5687
5688 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005689 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005690 locations->SetInAt(0, Location::RequiresRegister());
5691
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005692 DataType::Type field_type = field_info.GetFieldType();
5693 if (DataType::IsFloatingPointType(field_type)) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005694 locations->SetInAt(1, Location::RequiresFpuRegister());
5695 } else {
5696 locations->SetInAt(1, Location::RequiresRegister());
5697 }
5698
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005699 bool is_wide = field_type == DataType::Type::kInt64 || field_type == DataType::Type::kFloat64;
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005700 bool generate_volatile = field_info.IsVolatile()
5701 && is_wide
5702 && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
5703 bool needs_write_barrier =
5704 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
5705 // Temporary registers for the write barrier.
5706 // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
5707 if (needs_write_barrier) {
5708 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
5709 locations->AddTemp(Location::RequiresRegister());
5710 } else if (generate_volatile) {
5711 // The ARM encoding has some additional constraints for ldrexd/strexd:
5712 // - registers need to be consecutive
5713 // - the first register should be even but not R14.
5714 // We don't test for ARM yet, and the assertion makes sure that we
5715 // revisit this if we ever enable ARM encoding.
5716 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
5717
5718 locations->AddTemp(Location::RequiresRegister());
5719 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005720 if (field_type == DataType::Type::kFloat64) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005721 // For doubles we need two more registers to copy the value.
5722 locations->AddTemp(LocationFrom(r2));
5723 locations->AddTemp(LocationFrom(r3));
5724 }
5725 }
5726}
5727
5728void InstructionCodeGeneratorARMVIXL::HandleFieldSet(HInstruction* instruction,
5729 const FieldInfo& field_info,
5730 bool value_can_be_null) {
5731 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5732
5733 LocationSummary* locations = instruction->GetLocations();
5734 vixl32::Register base = InputRegisterAt(instruction, 0);
5735 Location value = locations->InAt(1);
5736
5737 bool is_volatile = field_info.IsVolatile();
5738 bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005739 DataType::Type field_type = field_info.GetFieldType();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005740 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
5741 bool needs_write_barrier =
5742 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
5743
5744 if (is_volatile) {
5745 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
5746 }
5747
5748 switch (field_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005749 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005750 case DataType::Type::kUint8:
5751 case DataType::Type::kInt8:
5752 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005753 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005754 case DataType::Type::kInt32: {
Evgeny Astigeevich98416bf2019-09-09 14:52:12 +01005755 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
5756 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005757 StoreOperandType operand_type = GetStoreOperandType(field_type);
5758 GetAssembler()->StoreToOffset(operand_type, RegisterFrom(value), base, offset);
Evgeny Astigeevich98416bf2019-09-09 14:52:12 +01005759 codegen_->MaybeRecordImplicitNullCheck(instruction);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005760 break;
5761 }
5762
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005763 case DataType::Type::kReference: {
Evgeny Astigeevich98416bf2019-09-09 14:52:12 +01005764 vixl32::Register value_reg = RegisterFrom(value);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005765 if (kPoisonHeapReferences && needs_write_barrier) {
5766 // Note that in the case where `value` is a null reference,
5767 // we do not enter this block, as a null reference does not
5768 // need poisoning.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005769 DCHECK_EQ(field_type, DataType::Type::kReference);
Evgeny Astigeevich98416bf2019-09-09 14:52:12 +01005770 value_reg = RegisterFrom(locations->GetTemp(0));
5771 __ Mov(value_reg, RegisterFrom(value));
5772 GetAssembler()->PoisonHeapReference(value_reg);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005773 }
Evgeny Astigeevich98416bf2019-09-09 14:52:12 +01005774 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
5775 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5776 GetAssembler()->StoreToOffset(kStoreWord, value_reg, base, offset);
5777 codegen_->MaybeRecordImplicitNullCheck(instruction);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005778 break;
5779 }
5780
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005781 case DataType::Type::kInt64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005782 if (is_volatile && !atomic_ldrd_strd) {
5783 GenerateWideAtomicStore(base,
5784 offset,
5785 LowRegisterFrom(value),
5786 HighRegisterFrom(value),
5787 RegisterFrom(locations->GetTemp(0)),
5788 RegisterFrom(locations->GetTemp(1)),
5789 instruction);
5790 } else {
Evgeny Astigeevich98416bf2019-09-09 14:52:12 +01005791 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
5792 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005793 GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), base, offset);
5794 codegen_->MaybeRecordImplicitNullCheck(instruction);
5795 }
5796 break;
5797 }
5798
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005799 case DataType::Type::kFloat32: {
Evgeny Astigeevich98416bf2019-09-09 14:52:12 +01005800 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
5801 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005802 GetAssembler()->StoreSToOffset(SRegisterFrom(value), base, offset);
Evgeny Astigeevich98416bf2019-09-09 14:52:12 +01005803 codegen_->MaybeRecordImplicitNullCheck(instruction);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005804 break;
5805 }
5806
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005807 case DataType::Type::kFloat64: {
Scott Wakelingc34dba72016-10-03 10:14:44 +01005808 vixl32::DRegister value_reg = DRegisterFrom(value);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005809 if (is_volatile && !atomic_ldrd_strd) {
5810 vixl32::Register value_reg_lo = RegisterFrom(locations->GetTemp(0));
5811 vixl32::Register value_reg_hi = RegisterFrom(locations->GetTemp(1));
5812
5813 __ Vmov(value_reg_lo, value_reg_hi, value_reg);
5814
5815 GenerateWideAtomicStore(base,
5816 offset,
5817 value_reg_lo,
5818 value_reg_hi,
5819 RegisterFrom(locations->GetTemp(2)),
5820 RegisterFrom(locations->GetTemp(3)),
5821 instruction);
5822 } else {
Evgeny Astigeevich98416bf2019-09-09 14:52:12 +01005823 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
5824 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005825 GetAssembler()->StoreDToOffset(value_reg, base, offset);
5826 codegen_->MaybeRecordImplicitNullCheck(instruction);
5827 }
5828 break;
5829 }
5830
Aart Bik66c158e2018-01-31 12:55:04 -08005831 case DataType::Type::kUint32:
5832 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005833 case DataType::Type::kVoid:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005834 LOG(FATAL) << "Unreachable type " << field_type;
5835 UNREACHABLE();
5836 }
5837
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005838 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
5839 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
5840 vixl32::Register card = RegisterFrom(locations->GetTemp(1));
5841 codegen_->MarkGCCard(temp, card, base, RegisterFrom(value), value_can_be_null);
5842 }
5843
5844 if (is_volatile) {
5845 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
5846 }
5847}
5848
void LocationsBuilderARMVIXL::HandleFieldGet(HInstruction* instruction,
                                             const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_info.GetFieldType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());

  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == DataType::Type::kFloat64)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // The output overlaps in the case of a volatile long: we don't want the
  // code generated by GenerateWideAtomicLoad to overwrite the
  // object's location. Likewise, in the case of an object field get
  // with read barriers enabled, we do not want the load to overwrite
  // the object's location, as we need it to emit the read barrier.
  bool overlap =
      (field_info.IsVolatile() && (field_info.GetFieldType() == DataType::Type::kInt64)) ||
      object_field_get_with_read_barrier;

  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    locations->SetOut(Location::RequiresRegister(),
                      (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  }
  if (volatile_for_double) {
    // The ARM encoding has some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for ARM yet, and the assertion makes sure that we
    // revisit this if we ever enable ARM encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier load in
    // CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier()
    // only if the offset is too big.
    if (field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}

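// A note on kReferenceLoadMinFarOffset, assuming standard T32 encodings:
// LDR (immediate) can only encode a limited unsigned offset, so a
// reference field far enough into the object cannot be loaded directly
// off the object register. A sketch of the far-field shape:
//
//   add ip, rObj, #(offset & ~0xfff)   // materialize the high part in a temp
//   ldr rOut, [ip, #(offset & 0xfff)]  // narrow offset left for the load
//
// The extra temp requested above gives the read barrier code a register
// for that address computation.
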
Location LocationsBuilderARMVIXL::ArithmeticZeroOrFpuRegister(HInstruction* input) {
  DCHECK(DataType::IsFloatingPointType(input->GetType())) << input->GetType();
  if ((input->IsFloatConstant() && (input->AsFloatConstant()->IsArithmeticZero())) ||
      (input->IsDoubleConstant() && (input->AsDoubleConstant()->IsArithmeticZero()))) {
    return Location::ConstantLocation(input->AsConstant());
  } else {
    return Location::RequiresFpuRegister();
  }
}

Location LocationsBuilderARMVIXL::ArmEncodableConstantOrRegister(HInstruction* constant,
                                                                 Opcode opcode) {
  DCHECK(!DataType::IsFloatingPointType(constant->GetType()));
  if (constant->IsConstant() &&
      CanEncodeConstantAsImmediate(constant->AsConstant(), opcode)) {
    return Location::ConstantLocation(constant->AsConstant());
  }
  return Location::RequiresRegister();
}

static bool CanEncode32BitConstantAsImmediate(
    CodeGeneratorARMVIXL* codegen,
    uint32_t value,
    Opcode opcode,
    vixl32::FlagsUpdate flags_update = vixl32::FlagsUpdate::DontCare) {
  ArmVIXLAssembler* assembler = codegen->GetAssembler();
  if (assembler->ShifterOperandCanHold(opcode, value, flags_update)) {
    return true;
  }
  Opcode neg_opcode = kNoOperand;
  uint32_t neg_value = 0;
  switch (opcode) {
    case AND: neg_opcode = BIC; neg_value = ~value; break;
    case ORR: neg_opcode = ORN; neg_value = ~value; break;
    case ADD: neg_opcode = SUB; neg_value = -value; break;
    case ADC: neg_opcode = SBC; neg_value = ~value; break;
    case SUB: neg_opcode = ADD; neg_value = -value; break;
    case SBC: neg_opcode = ADC; neg_value = ~value; break;
    case MOV: neg_opcode = MVN; neg_value = ~value; break;
    default:
      return false;
  }

  if (assembler->ShifterOperandCanHold(neg_opcode, neg_value, flags_update)) {
    return true;
  }

  return opcode == AND && IsPowerOfTwo(value + 1);
}

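// Worked examples for the rewrite table above (values chosen for
// illustration): 0xffffff00 is not a valid T32 modified immediate, but its
// complement 0xff is, so `AND rd, rn, #0xffffff00` can be materialized as
// `BIC rd, rn, #0xff`. Likewise ADD with 0xfffffffc (i.e. -4) is accepted
// because `SUB rd, rn, #4` encodes. The final special case admits AND with
// masks of the form 2^n - 1 (e.g. 0x7fff), which the code generator can
// lower as a bit-field extract instead of an immediate operand.
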
bool LocationsBuilderARMVIXL::CanEncodeConstantAsImmediate(HConstant* input_cst, Opcode opcode) {
  uint64_t value = static_cast<uint64_t>(Int64FromConstant(input_cst));
  if (DataType::Is64BitType(input_cst->GetType())) {
    Opcode high_opcode = opcode;
    vixl32::FlagsUpdate low_flags_update = vixl32::FlagsUpdate::DontCare;
    switch (opcode) {
      case SUB:
        // Flip the operation to an ADD.
        value = -value;
        opcode = ADD;
        FALLTHROUGH_INTENDED;
      case ADD:
        if (Low32Bits(value) == 0u) {
          return CanEncode32BitConstantAsImmediate(codegen_, High32Bits(value), opcode);
        }
        high_opcode = ADC;
        low_flags_update = vixl32::FlagsUpdate::SetFlags;
        break;
      default:
        break;
    }
    return CanEncode32BitConstantAsImmediate(codegen_, High32Bits(value), high_opcode) &&
           CanEncode32BitConstantAsImmediate(codegen_, Low32Bits(value), opcode, low_flags_update);
  } else {
    return CanEncode32BitConstantAsImmediate(codegen_, Low32Bits(value), opcode);
  }
}

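// Example of the 64-bit split above, assuming the usual ADDS/ADC lowering:
// adding the constant 0x0000000100000005 to a long would be emitted as
//
//   adds rLo, rLo, #5   // low half must set flags to produce the carry
//   adc  rHi, rHi, #1   // high half consumes the carry
//
// so the low word is tested with SetFlags and the high word against ADC.
// When the low word is zero, no carry can be produced and only the high
// word needs an encodable immediate, which is the Low32Bits(value) == 0u
// fast path.
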
void InstructionCodeGeneratorARMVIXL::HandleFieldGet(HInstruction* instruction,
                                                     const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  vixl32::Register base = InputRegisterAt(instruction, 0);
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (load_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      LoadOperandType operand_type = GetLoadOperandType(load_type);
      GetAssembler()->LoadFromOffset(operand_type, RegisterFrom(out), base, offset);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location maybe_temp = (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location();
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, maybe_temp, /* needs_null_check= */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        {
          // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
          EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
          GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, locations->InAt(0), offset);
      }
      break;
    }

    case DataType::Type::kInt64: {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicLoad(base, offset, LowRegisterFrom(out), HighRegisterFrom(out));
      } else {
        GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out), base, offset);
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat32: {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      GetAssembler()->LoadSFromOffset(SRegisterFrom(out), base, offset);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      vixl32::DRegister out_dreg = DRegisterFrom(out);
      if (is_volatile && !atomic_ldrd_strd) {
        vixl32::Register lo = RegisterFrom(locations->GetTemp(0));
        vixl32::Register hi = RegisterFrom(locations->GetTemp(1));
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ Vmov(out_dreg, lo, hi);
      } else {
        GetAssembler()->LoadDFromOffset(out_dreg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << load_type;
      UNREACHABLE();
  }

  if (is_volatile) {
    if (load_type == DataType::Type::kReference) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}

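// Barrier placement sketch for the function above, assuming the DMB-based
// lowering of GenerateMemoryBarrier on ARM: a volatile int field read is
// expected to come out as roughly
//
//   ldr r0, [rBase, #offset]   // the load itself
//   dmb ish                    // kLoadAny: later accesses may not move above it
//
// while a volatile kInt64/kFloat64 read without atomic ldrd/strd goes
// through GenerateWideAtomicLoad (an ldrexd-style sequence) to keep the
// 64-bit read single-copy atomic before the same trailing barrier.
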
void LocationsBuilderARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARMVIXL::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARMVIXL::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARMVIXL::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  codegen_->CreateStringBuilderAppendLocations(instruction, LocationFrom(r0));
}

void InstructionCodeGeneratorARMVIXL::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  __ Mov(r0, instruction->GetFormat()->GetValue());
  codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
}

void LocationsBuilderARMVIXL::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARMVIXL calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARMVIXL::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARMVIXL calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARMVIXL::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARMVIXL calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARMVIXL::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARMVIXL calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARMVIXL::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARMVIXL calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARMVIXL::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARMVIXL calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARMVIXL::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARMVIXL calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARMVIXL::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARMVIXL calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARMVIXL::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}

void CodeGeneratorARMVIXL::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }

  UseScratchRegisterScope temps(GetVIXLAssembler());
  // Ensure the pc position is recorded immediately after the `ldr` instruction.
  ExactAssemblyScope aas(GetVIXLAssembler(),
                         vixl32::kMaxInstructionSizeInBytes,
                         CodeBufferCheckScope::kMaximumSize);
  __ ldr(temps.Acquire(), MemOperand(InputRegisterAt(instruction, 0)));
  RecordPcInfo(instruction, instruction->GetDexPc());
}

void CodeGeneratorARMVIXL::GenerateExplicitNullCheck(HNullCheck* instruction) {
  NullCheckSlowPathARMVIXL* slow_path =
      new (GetScopedAllocator()) NullCheckSlowPathARMVIXL(instruction);
  AddSlowPath(slow_path);
  __ CompareAndBranchIfZero(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARMVIXL::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

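// How the implicit variant above works: instead of an explicit
// compare-and-branch, the generated `ldr` simply dereferences the object so
// that a null receiver faults. RecordPcInfo ties that exact pc to this
// HNullCheck, letting the runtime's fault handler turn the resulting
// SIGSEGV into a NullPointerException; this is also why no literal pool may
// be emitted between the `ldr` and the pc recording.
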
void CodeGeneratorARMVIXL::LoadFromShiftedRegOffset(DataType::Type type,
                                                    Location out_loc,
                                                    vixl32::Register base,
                                                    vixl32::Register reg_index,
                                                    vixl32::Condition cond) {
  uint32_t shift_count = DataType::SizeShift(type);
  MemOperand mem_address(base, reg_index, vixl32::LSL, shift_count);

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      __ Ldrb(cond, RegisterFrom(out_loc), mem_address);
      break;
    case DataType::Type::kInt8:
      __ Ldrsb(cond, RegisterFrom(out_loc), mem_address);
      break;
    case DataType::Type::kUint16:
      __ Ldrh(cond, RegisterFrom(out_loc), mem_address);
      break;
    case DataType::Type::kInt16:
      __ Ldrsh(cond, RegisterFrom(out_loc), mem_address);
      break;
    case DataType::Type::kReference:
    case DataType::Type::kInt32:
      __ Ldr(cond, RegisterFrom(out_loc), mem_address);
      break;
    // T32 doesn't support LoadFromShiftedRegOffset mem address mode for these types.
    case DataType::Type::kInt64:
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
    default:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
}

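// Example of the addressing mode used by this helper and the store variant
// below: for a kInt32 element, shift_count is 2 and the access is a single
// instruction,
//
//   ldr rOut, [rBase, rIndex, LSL #2]
//
// folding the element-size scaling into the memory operand. The wide and
// floating-point cases are rejected because T32 has no register-shifted
// form for ldrd/strd or vldr/vstr, so their callers add the scaled index
// into a temp first.
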
void CodeGeneratorARMVIXL::StoreToShiftedRegOffset(DataType::Type type,
                                                   Location loc,
                                                   vixl32::Register base,
                                                   vixl32::Register reg_index,
                                                   vixl32::Condition cond) {
  uint32_t shift_count = DataType::SizeShift(type);
  MemOperand mem_address(base, reg_index, vixl32::LSL, shift_count);

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      __ Strb(cond, RegisterFrom(loc), mem_address);
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      __ Strh(cond, RegisterFrom(loc), mem_address);
      break;
    case DataType::Type::kReference:
    case DataType::Type::kInt32:
      __ Str(cond, RegisterFrom(loc), mem_address);
      break;
    // T32 doesn't support StoreToShiftedRegOffset mem address mode for these types.
    case DataType::Type::kInt64:
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
    default:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARMVIXL::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    if (instruction->GetIndex()->IsConstant()) {
      // Array loads with constant index are treated as field loads.
      // We need a temporary register for the read barrier load in
      // CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier()
      // only if the offset is too big.
      uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
      uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
      offset += index << DataType::SizeShift(DataType::Type::kReference);
      if (offset >= kReferenceLoadMinFarOffset) {
        locations->AddTemp(Location::RequiresRegister());
      }
    } else {
      // We need a non-scratch temporary for the array data pointer in
      // CodeGeneratorARMVIXL::GenerateArrayLoadWithBakerReadBarrier().
      locations->AddTemp(Location::RequiresRegister());
    }
  } else if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
    // Also need a temporary for String compression feature.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  vixl32::Register obj = InputRegisterAt(instruction, 0);
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  DataType::Type type = instruction->GetType();
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  HInstruction* array_instr = instruction->GetArray();
  bool has_intermediate_address = array_instr->IsIntermediateAddress();

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      vixl32::Register length;
      if (maybe_compressed_char_at) {
        length = RegisterFrom(locations->GetTemp(0));
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
        GetAssembler()->LoadFromOffset(kLoadWord, length, obj, count_offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      if (index.IsConstant()) {
        int32_t const_index = Int32ConstantFrom(index);
        if (maybe_compressed_char_at) {
          vixl32::Label uncompressed_load, done;
          vixl32::Label* final_label = codegen_->GetFinalLabel(instruction, &done);
          __ Lsrs(length, length, 1u);  // LSRS has a 16-bit encoding, TST (immediate) does not.
          static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                        "Expecting 0=compressed, 1=uncompressed");
          __ B(cs, &uncompressed_load, /* is_far_target= */ false);
          GetAssembler()->LoadFromOffset(kLoadUnsignedByte,
                                         RegisterFrom(out_loc),
                                         obj,
                                         data_offset + const_index);
          __ B(final_label);
          __ Bind(&uncompressed_load);
          GetAssembler()->LoadFromOffset(GetLoadOperandType(DataType::Type::kUint16),
                                         RegisterFrom(out_loc),
                                         obj,
                                         data_offset + (const_index << 1));
          if (done.IsReferenced()) {
            __ Bind(&done);
          }
        } else {
          uint32_t full_offset = data_offset + (const_index << DataType::SizeShift(type));

          // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
          EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
          LoadOperandType load_type = GetLoadOperandType(type);
          GetAssembler()->LoadFromOffset(load_type, RegisterFrom(out_loc), obj, full_offset);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();

        if (has_intermediate_address) {
          // We do not need to compute the intermediate address from the array: the
          // input instruction has done it already. See the comment in
          // `TryExtractArrayAccessAddress()`.
          if (kIsDebugBuild) {
            HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
            DCHECK_EQ(Uint64ConstantFrom(tmp->GetOffset()), data_offset);
          }
          temp = obj;
        } else {
          __ Add(temp, obj, data_offset);
        }
        if (maybe_compressed_char_at) {
          vixl32::Label uncompressed_load, done;
          vixl32::Label* final_label = codegen_->GetFinalLabel(instruction, &done);
          __ Lsrs(length, length, 1u);  // LSRS has a 16-bit encoding, TST (immediate) does not.
          static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                        "Expecting 0=compressed, 1=uncompressed");
          __ B(cs, &uncompressed_load, /* is_far_target= */ false);
          __ Ldrb(RegisterFrom(out_loc), MemOperand(temp, RegisterFrom(index), vixl32::LSL, 0));
          __ B(final_label);
          __ Bind(&uncompressed_load);
          __ Ldrh(RegisterFrom(out_loc), MemOperand(temp, RegisterFrom(index), vixl32::LSL, 1));
          if (done.IsReferenced()) {
            __ Bind(&done);
          }
        } else {
          // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
          EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
          codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    }

    case DataType::Type::kReference: {
      // The read barrier instrumentation of object ArrayGet
      // instructions does not support the HIntermediateAddress
      // instruction.
      DCHECK(!(has_intermediate_address && kEmitCompilerReadBarrier));

      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorARMVIXL::GenerateArrayLoadWithBakerReadBarrier call.
        DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
        if (index.IsConstant()) {
          // Array load with a constant index can be treated as a field load.
          Location maybe_temp =
              (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location();
          data_offset += Int32ConstantFrom(index) << DataType::SizeShift(type);
          codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          data_offset,
                                                          maybe_temp,
                                                          /* needs_null_check= */ false);
        } else {
          Location temp = locations->GetTemp(0);
          codegen_->GenerateArrayLoadWithBakerReadBarrier(
              out_loc, obj, data_offset, index, temp, /* needs_null_check= */ false);
        }
      } else {
        vixl32::Register out = OutputRegister(instruction);
        if (index.IsConstant()) {
          size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
          {
            // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
            EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
            GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
            codegen_->MaybeRecordImplicitNullCheck(instruction);
          }
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          UseScratchRegisterScope temps(GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();

          if (has_intermediate_address) {
            // We do not need to compute the intermediate address from the array: the
            // input instruction has done it already. See the comment in
            // `TryExtractArrayAccessAddress()`.
            if (kIsDebugBuild) {
              HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
              DCHECK_EQ(Uint64ConstantFrom(tmp->GetOffset()), data_offset);
            }
            temp = obj;
          } else {
            __ Add(temp, obj, data_offset);
          }
          {
            // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
            EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
            codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
            temps.Close();
            codegen_->MaybeRecordImplicitNullCheck(instruction);
          }
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      // As two macro instructions can be emitted the max size is doubled.
      EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_8) + data_offset;
        GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out_loc), obj, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
        GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out_loc), temp, data_offset);
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat32: {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      // As two macro instructions can be emitted the max size is doubled.
      EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
      vixl32::SRegister out = SRegisterFrom(out_loc);
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
        GetAssembler()->LoadSFromOffset(out, obj, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
        GetAssembler()->LoadSFromOffset(out, temp, data_offset);
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      // As two macro instructions can be emitted the max size is doubled.
      EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_8) + data_offset;
        GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), obj, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
        GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), temp, data_offset);
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
}

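// Compressed-string layout assumed by the String.charAt() paths above: the
// String `count` field holds (length << 1), with the low bit acting as the
// compression flag (0 = compressed 8-bit chars, 1 = uncompressed 16-bit
// chars, per the static_asserts). `Lsrs length, length, #1` therefore both
// recovers the real length and shifts the flag into the carry:
//
//   carry clear -> ldrb from data + index         (compressed)
//   carry set   -> ldrh from data + (index << 1)  (uncompressed)
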
void LocationsBuilderARMVIXL::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  vixl32::Register array = InputRegisterAt(instruction, 0);
  Location index = locations->InAt(1);
  DataType::Type value_type = instruction->GetComponentType();
  bool needs_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  uint32_t data_offset = mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
  Location value_loc = locations->InAt(2);
  HInstruction* array_instr = instruction->GetArray();
  bool has_intermediate_address = array_instr->IsIntermediateAddress();

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (index.IsConstant()) {
        int32_t const_index = Int32ConstantFrom(index);
        uint32_t full_offset = data_offset + (const_index << DataType::SizeShift(value_type));
        StoreOperandType store_type = GetStoreOperandType(value_type);
        // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
        GetAssembler()->StoreToOffset(store_type, RegisterFrom(value_loc), array, full_offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();

        if (has_intermediate_address) {
          // We do not need to compute the intermediate address from the array: the
          // input instruction has done it already. See the comment in
          // `TryExtractArrayAccessAddress()`.
          if (kIsDebugBuild) {
            HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
            DCHECK_EQ(Uint64ConstantFrom(tmp->GetOffset()), data_offset);
          }
          temp = array;
        } else {
          __ Add(temp, array, data_offset);
        }
        // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
        codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case DataType::Type::kReference: {
      vixl32::Register value = RegisterFrom(value_loc);
      // TryExtractArrayAccessAddress optimization is never applied for non-primitive ArraySet.
      // See the comment in instruction_simplifier_shared.cc.
      DCHECK(!has_intermediate_address);

      if (instruction->InputAt(2)->IsNullConstant()) {
        // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
        // As two macro instructions can be emitted the max size is doubled.
        EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
        // Just setting null.
        if (index.IsConstant()) {
          size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
          GetAssembler()->StoreToOffset(kStoreWord, value, array, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          UseScratchRegisterScope temps(GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();
          __ Add(temp, array, data_offset);
          codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
        }
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!needs_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      Location temp1_loc = locations->GetTemp(0);
      vixl32::Register temp1 = RegisterFrom(temp1_loc);
      Location temp2_loc = locations->GetTemp(1);
      vixl32::Register temp2 = RegisterFrom(temp2_loc);

      bool can_value_be_null = instruction->GetValueCanBeNull();
      vixl32::Label do_store;
      if (can_value_be_null) {
        __ CompareAndBranchIfZero(value, &do_store, /* is_far_target= */ false);
      }

      SlowPathCodeARMVIXL* slow_path = nullptr;
      if (needs_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARMVIXL(instruction);
        codegen_->AddSlowPath(slow_path);

        const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
        const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
        const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers. This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        {
          // Ensure we record the pc position immediately after the `ldr` instruction.
          ExactAssemblyScope aas(GetVIXLAssembler(),
                                 vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
          // /* HeapReference<Class> */ temp1 = array->klass_
          __ ldr(temp1, MemOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
        GetAssembler()->MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        GetAssembler()->LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        GetAssembler()->LoadFromOffset(kLoadWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.
        __ Cmp(temp1, temp2);

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          vixl32::Label do_put;
          __ B(eq, &do_put, /* is_far_target= */ false);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          GetAssembler()->MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          GetAssembler()->LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ B(ne, slow_path->GetEntryLabel());
        }
      }

      codegen_->MarkGCCard(temp1, temp2, array, value, /* value_can_be_null= */ false);

      if (can_value_be_null) {
        DCHECK(do_store.IsReferenced());
        __ Bind(&do_store);
      }

      vixl32::Register source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(value_type, DataType::Type::kReference);
        __ Mov(temp1, value);
        GetAssembler()->PoisonHeapReference(temp1);
        source = temp1;
      }

      {
        // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
        // As two macro instructions can be emitted the max size is doubled.
        EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
        if (index.IsConstant()) {
          size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
          GetAssembler()->StoreToOffset(kStoreWord, source, array, offset);
        } else {
          DCHECK(index.IsRegister()) << index;

          UseScratchRegisterScope temps(GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();
          __ Add(temp, array, data_offset);
          codegen_->StoreToShiftedRegOffset(value_type,
                                            LocationFrom(source),
                                            temp,
                                            RegisterFrom(index));
        }

        if (can_value_be_null || !needs_type_check) {
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt64: {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      // As two macro instructions can be emitted the max size is doubled.
      EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_8) + data_offset;
        GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), array, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
        GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), temp, data_offset);
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat32: {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      // As two macro instructions can be emitted the max size is doubled.
      EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
        GetAssembler()->StoreSToOffset(SRegisterFrom(value), array, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
        GetAssembler()->StoreSToOffset(SRegisterFrom(value), temp, data_offset);
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      // As two macro instructions can be emitted the max size is doubled.
      EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_8) + data_offset;
        GetAssembler()->StoreDToOffset(DRegisterFrom(value), array, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
        GetAssembler()->StoreDToOffset(DRegisterFrom(value), temp, data_offset);
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }
}

Scott Wakelinga7812ae2016-10-17 10:03:36 +01006869void LocationsBuilderARMVIXL::VisitArrayLength(HArrayLength* instruction) {
6870 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006871 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006872 locations->SetInAt(0, Location::RequiresRegister());
6873 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6874}
6875
6876void InstructionCodeGeneratorARMVIXL::VisitArrayLength(HArrayLength* instruction) {
6877 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
6878 vixl32::Register obj = InputRegisterAt(instruction, 0);
6879 vixl32::Register out = OutputRegister(instruction);
Alexandre Rames374ddf32016-11-04 10:40:49 +00006880 {
Artem Serov0fb37192016-12-06 18:13:40 +00006881 ExactAssemblyScope aas(GetVIXLAssembler(),
6882 vixl32::kMaxInstructionSizeInBytes,
6883 CodeBufferCheckScope::kMaximumSize);
Alexandre Rames374ddf32016-11-04 10:40:49 +00006884 __ ldr(out, MemOperand(obj, offset));
6885 codegen_->MaybeRecordImplicitNullCheck(instruction);
6886 }
Anton Kirilove28d9ae2016-10-25 18:17:23 +01006887 // Mask out compression flag from String's array length.
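  // With compression enabled, the `count_` field holds the length shifted left
  // by one with the low bit acting as the compression flag, so a single LSR
  // both drops the flag and yields the character count.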
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Lsr(out, out, 1u);
  }
}

void LocationsBuilderARMVIXL::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->GetOffset()));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  vixl32::Register out = OutputRegister(instruction);
  vixl32::Register first = InputRegisterAt(instruction, 0);
  Location second = instruction->GetLocations()->InAt(1);

  if (second.IsRegister()) {
    __ Add(out, first, RegisterFrom(second));
  } else {
    __ Add(out, first, Int32ConstantFrom(second));
  }
}

void LocationsBuilderARMVIXL::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instruction) {
  LOG(FATAL) << "Unreachable " << instruction->GetId();
}

void InstructionCodeGeneratorARMVIXL::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instruction) {
  LOG(FATAL) << "Unreachable " << instruction->GetId();
}

void LocationsBuilderARMVIXL::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
  caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(1)));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);

  HInstruction* index = instruction->InputAt(0);
  HInstruction* length = instruction->InputAt(1);
  // If both index and length are constants, we can check the bounds statically. But if at least
  // one of them is not encodable, ArmEncodableConstantOrRegister would create a
  // Location::RequiresRegister(), which is undesirable here. Instead, we create constant
  // locations.
  bool both_const = index->IsConstant() && length->IsConstant();
  locations->SetInAt(0, both_const
      ? Location::ConstantLocation(index->AsConstant())
      : ArmEncodableConstantOrRegister(index, CMP));
  locations->SetInAt(1, both_const
      ? Location::ConstantLocation(length->AsConstant())
      : ArmEncodableConstantOrRegister(length, CMP));
}

void InstructionCodeGeneratorARMVIXL::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);

  if (length_loc.IsConstant()) {
    int32_t length = Int32ConstantFrom(length_loc);
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = Int32ConstantFrom(index_loc);
      if (index < 0 || index >= length) {
        SlowPathCodeARMVIXL* slow_path =
            new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARMVIXL(instruction);
        codegen_->AddSlowPath(slow_path);
        __ B(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    SlowPathCodeARMVIXL* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARMVIXL(instruction);
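    // The unsigned `hs` (>=) comparison below also catches negative indices,
    // which wrap around to large unsigned values.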
    __ Cmp(RegisterFrom(index_loc), length);
    codegen_->AddSlowPath(slow_path);
    __ B(hs, slow_path->GetEntryLabel());
  } else {
    SlowPathCodeARMVIXL* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARMVIXL(instruction);
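    // With the operands reversed, the unsigned `ls` (<=) branch below likewise
    // rejects both out-of-range and negative indices in a single test.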
    __ Cmp(RegisterFrom(length_loc), InputOperandAt(instruction, 0));
    codegen_->AddSlowPath(slow_path);
    __ B(ls, slow_path->GetEntryLabel());
  }
}

void CodeGeneratorARMVIXL::MarkGCCard(vixl32::Register temp,
                                      vixl32::Register card,
                                      vixl32::Register object,
                                      vixl32::Register value,
                                      bool value_can_be_null) {
  vixl32::Label is_null;
  if (value_can_be_null) {
    __ CompareAndBranchIfZero(value, &is_null, /* is_far_target= */ false);
  }
  // Load the address of the card table into `card`.
  GetAssembler()->LoadFromOffset(
      kLoadWord, card, tr, Thread::CardTableOffset<kArmPointerSize>().Int32Value());
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ Lsr(temp, object, Operand::From(gc::accounting::CardTable::kCardShift));
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the STRB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
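  // Illustrative example (addresses hypothetical): an object at address A has
  // its card byte at card_base + (A >> kCardShift); since the biased base's low
  // byte equals `kCardDirty`, storing the low byte of `card` marks the card.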
  __ Strb(card, MemOperand(card, temp));
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}

void LocationsBuilderARMVIXL::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARMVIXL::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}

void InstructionCodeGeneratorARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 14);
}

void InstructionCodeGeneratorARMVIXL::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                           HBasicBlock* successor) {
  SuspendCheckSlowPathARMVIXL* slow_path =
      down_cast<SuspendCheckSlowPathARMVIXL*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARMVIXL(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  GetAssembler()->LoadFromOffset(
      kLoadUnsignedHalfword, temp, tr, Thread::ThreadFlagsOffset<kArmPointerSize>().Int32Value());
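  // Any non-zero thread flag (e.g. a pending suspend or checkpoint request)
  // diverts execution to the slow path; on a back edge (`successor` != nullptr)
  // the fast path branches straight to the loop header instead.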
  if (successor == nullptr) {
    __ CompareAndBranchIfNonZero(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ CompareAndBranchIfZero(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
  }
}

ArmVIXLAssembler* ParallelMoveResolverARMVIXL::GetAssembler() const {
  return codegen_->GetAssembler();
}

void ParallelMoveResolverARMVIXL::EmitMove(size_t index) {
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(RegisterFrom(destination), RegisterFrom(source));
    } else if (destination.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
    } else {
      DCHECK(destination.IsStackSlot());
      GetAssembler()->StoreToOffset(kStoreWord,
                                    RegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      GetAssembler()->LoadFromOffset(kLoadWord,
                                     RegisterFrom(destination),
                                     sp,
                                     source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      vixl32::Register temp = temps.Acquire();
      GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
      GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsRegister()) {
      __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
    } else if (destination.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
    } else {
      DCHECK(destination.IsStackSlot());
      GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsDoubleStackSlot()) {
      vixl32::DRegister temp = temps.AcquireD();
      GetAssembler()->LoadDFromOffset(temp, sp, source.GetStackIndex());
      GetAssembler()->StoreDToOffset(temp, sp, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      DCHECK(ExpectedPairLayout(destination));
      GetAssembler()->LoadFromOffset(
          kLoadWordPair, LowRegisterFrom(destination), sp, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      GetAssembler()->LoadDFromOffset(DRegisterFrom(destination), sp, source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    if (destination.IsRegisterPair()) {
      __ Mov(LowRegisterFrom(destination), LowRegisterFrom(source));
      __ Mov(HighRegisterFrom(destination), HighRegisterFrom(source));
    } else if (destination.IsFpuRegisterPair()) {
      __ Vmov(DRegisterFrom(destination), LowRegisterFrom(source), HighRegisterFrom(source));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      GetAssembler()->StoreToOffset(kStoreWordPair,
                                    LowRegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    if (destination.IsRegisterPair()) {
      __ Vmov(LowRegisterFrom(destination), HighRegisterFrom(destination), DRegisterFrom(source));
    } else if (destination.IsFpuRegisterPair()) {
      __ Vmov(DRegisterFrom(destination), DRegisterFrom(source));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      GetAssembler()->StoreDToOffset(DRegisterFrom(source), sp, destination.GetStackIndex());
    }
  } else {
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ Mov(RegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsStackSlot());
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, value);
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = Int64ConstantFrom(source);
      if (destination.IsRegisterPair()) {
        __ Mov(LowRegisterFrom(destination), Low32Bits(value));
        __ Mov(HighRegisterFrom(destination), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, Low32Bits(value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
        __ Mov(temp, High32Bits(value));
        GetAssembler()->StoreToOffset(kStoreWord,
                                      temp,
                                      sp,
                                      destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ Vmov(DRegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, Low32Bits(int_value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
        __ Mov(temp, High32Bits(int_value));
        GetAssembler()->StoreToOffset(kStoreWord,
                                      temp,
                                      sp,
                                      destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ Vmov(SRegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsStackSlot());
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, bit_cast<int32_t, float>(value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
      }
    }
  }
}

void ParallelMoveResolverARMVIXL::Exchange(vixl32::Register reg, int mem) {
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  __ Mov(temp, reg);
  GetAssembler()->LoadFromOffset(kLoadWord, reg, sp, mem);
  GetAssembler()->StoreToOffset(kStoreWord, temp, sp, mem);
}

void ParallelMoveResolverARMVIXL::Exchange(int mem1, int mem2) {
  // TODO(VIXL32): Double check the performance of this implementation.
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  vixl32::Register temp1 = temps.Acquire();
  ScratchRegisterScope ensure_scratch(
      this, temp1.GetCode(), r0.GetCode(), codegen_->GetNumberOfCoreRegisters());
  vixl32::Register temp2(ensure_scratch.GetRegister());

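  // If `ensure_scratch` had to spill a core register to obtain the second
  // temp, the push moved `sp` down by one word, so the stack offsets below
  // must be adjusted accordingly.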
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  GetAssembler()->LoadFromOffset(kLoadWord, temp1, sp, mem1 + stack_offset);
  GetAssembler()->LoadFromOffset(kLoadWord, temp2, sp, mem2 + stack_offset);
  GetAssembler()->StoreToOffset(kStoreWord, temp1, sp, mem2 + stack_offset);
  GetAssembler()->StoreToOffset(kStoreWord, temp2, sp, mem1 + stack_offset);
}

void ParallelMoveResolverARMVIXL::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());

  if (source.IsRegister() && destination.IsRegister()) {
    vixl32::Register temp = temps.Acquire();
    DCHECK(!RegisterFrom(source).Is(temp));
    DCHECK(!RegisterFrom(destination).Is(temp));
    __ Mov(temp, RegisterFrom(destination));
    __ Mov(RegisterFrom(destination), RegisterFrom(source));
    __ Mov(RegisterFrom(source), temp);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(RegisterFrom(source), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(RegisterFrom(destination), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    vixl32::Register temp = temps.Acquire();
    __ Vmov(temp, SRegisterFrom(source));
    __ Vmov(SRegisterFrom(source), SRegisterFrom(destination));
    __ Vmov(SRegisterFrom(destination), temp);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
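    // Swap the two core-register pairs through a single D register: the source
    // pair is parked in `temp` while the destination pair is copied over.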
    vixl32::DRegister temp = temps.AcquireD();
    __ Vmov(temp, LowRegisterFrom(source), HighRegisterFrom(source));
    __ Mov(LowRegisterFrom(source), LowRegisterFrom(destination));
    __ Mov(HighRegisterFrom(source), HighRegisterFrom(destination));
    __ Vmov(LowRegisterFrom(destination), HighRegisterFrom(destination), temp);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    vixl32::Register low_reg = LowRegisterFrom(source.IsRegisterPair() ? source : destination);
    int mem = source.IsRegisterPair() ? destination.GetStackIndex() : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    vixl32::DRegister temp = temps.AcquireD();
    __ Vmov(temp, low_reg, vixl32::Register(low_reg.GetCode() + 1));
    GetAssembler()->LoadFromOffset(kLoadWordPair, low_reg, sp, mem);
    GetAssembler()->StoreDToOffset(temp, sp, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    vixl32::DRegister first = DRegisterFrom(source);
    vixl32::DRegister second = DRegisterFrom(destination);
    vixl32::DRegister temp = temps.AcquireD();
    __ Vmov(temp, first);
    __ Vmov(first, second);
    __ Vmov(second, temp);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    vixl32::DRegister reg = source.IsFpuRegisterPair()
        ? DRegisterFrom(source)
        : DRegisterFrom(destination);
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    vixl32::DRegister temp = temps.AcquireD();
    __ Vmov(temp, reg);
    GetAssembler()->LoadDFromOffset(reg, sp, mem);
    GetAssembler()->StoreDToOffset(temp, sp, mem);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    vixl32::SRegister reg = source.IsFpuRegister()
        ? SRegisterFrom(source)
        : SRegisterFrom(destination);
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    vixl32::Register temp = temps.Acquire();
    __ Vmov(temp, reg);
    GetAssembler()->LoadSFromOffset(reg, sp, mem);
    GetAssembler()->StoreToOffset(kStoreWord, temp, sp, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    vixl32::DRegister temp1 = temps.AcquireD();
    vixl32::DRegister temp2 = temps.AcquireD();
    __ Vldr(temp1, MemOperand(sp, source.GetStackIndex()));
    __ Vldr(temp2, MemOperand(sp, destination.GetStackIndex()));
    __ Vstr(temp1, MemOperand(sp, destination.GetStackIndex()));
    __ Vstr(temp2, MemOperand(sp, source.GetStackIndex()));
  } else {
    LOG(FATAL) << "Unimplemented " << source << " <-> " << destination;
  }
}

void ParallelMoveResolverARMVIXL::SpillScratch(int reg) {
  __ Push(vixl32::Register(reg));
}

void ParallelMoveResolverARMVIXL::RestoreScratch(int reg) {
  __ Pop(vixl32::Register(reg));
}

HLoadClass::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
    case HLoadClass::LoadKind::kBssEntryPublic:
    case HLoadClass::LoadKind::kBssEntryPackage:
      DCHECK(!GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}

void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        LocationFrom(calling_convention.GetRegisterAt(0)),
        LocationFrom(r0));
    DCHECK(calling_convention.GetRegisterAt(0).Is(r0));
    return;
  }
  DCHECK_EQ(cls->NeedsAccessCheck(),
            load_kind == HLoadClass::LoadKind::kBssEntryPublic ||
                load_kind == HLoadClass::LoadKind::kBssEntryPackage);

  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}

// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 15);
    return;
  }
  DCHECK_EQ(cls->NeedsAccessCheck(),
            load_kind == HLoadClass::LoadKind::kBssEntryPublic ||
                load_kind == HLoadClass::LoadKind::kBssEntryPackage);

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  vixl32::Register out = OutputRegister(cls);

  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      vixl32::Register current_method = InputRegisterAt(cls, 0);
      codegen_->GenerateGcRootFieldLoad(cls,
                                        out_loc,
                                        current_method,
                                        ArtMethod::DeclaringClassOffset().Int32Value(),
                                        read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
          codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      codegen_->EmitMovwMovtPlaceholder(labels, out);
      break;
    }
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
          codegen_->NewBootImageRelRoPatch(CodeGenerator::GetBootImageOffset(cls));
      codegen_->EmitMovwMovtPlaceholder(labels, out);
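      // The movw/movt pair materializes the address of the entry in the boot
      // image's .data.bimg.rel.ro section; the load below then fetches the
      // already-relocated class reference from it.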
      __ Ldr(out, MemOperand(out, /* offset= */ 0));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry:
    case HLoadClass::LoadKind::kBssEntryPublic:
    case HLoadClass::LoadKind::kBssEntryPackage: {
      CodeGeneratorARMVIXL::PcRelativePatchInfo* labels = codegen_->NewTypeBssEntryPatch(cls);
      codegen_->EmitMovwMovtPlaceholder(labels, out);
      // All aligned loads are implicitly atomic consume operations on ARM.
      codegen_->GenerateGcRootFieldLoad(cls, out_loc, out, /* offset= */ 0, read_barrier_option);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                       cls->GetTypeIndex(),
                                                       cls->GetClass()));
      // /* GcRoot<mirror::Class> */ out = *out
      codegen_->GenerateGcRootFieldLoad(cls, out_loc, out, /* offset= */ 0, read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    LoadClassSlowPathARMVIXL* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathARMVIXL(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 16);
  }
}

void LocationsBuilderARMVIXL::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  Location location = LocationFrom(calling_convention.GetRegisterAt(0));
  CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
}

void InstructionCodeGeneratorARMVIXL::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}

void LocationsBuilderARMVIXL::VisitLoadMethodType(HLoadMethodType* load) {
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  Location location = LocationFrom(calling_convention.GetRegisterAt(0));
  CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
}

void InstructionCodeGeneratorARMVIXL::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}

void LocationsBuilderARMVIXL::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}

void InstructionCodeGeneratorARMVIXL::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  LoadClassSlowPathARMVIXL* slow_path =
      new (codegen_->GetScopedAllocator()) LoadClassSlowPathARMVIXL(check->GetLoadClass(), check);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

void InstructionCodeGeneratorARMVIXL::GenerateClassInitializationCheck(
    LoadClassSlowPathARMVIXL* slow_path, vixl32::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  constexpr uint32_t shifted_visibly_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << status_lsb_position;

  const size_t status_offset = mirror::Class::StatusOffset().SizeValue();
  GetAssembler()->LoadFromOffset(kLoadWord, temp, class_reg, status_offset);
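  // The status is held in the most significant bits of the status word and
  // ClassStatus::kVisiblyInitialized is the terminal state, so one unsigned
  // comparison (the `lo` branch below) detects every class that is not yet
  // visibly initialized.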
  __ Cmp(temp, shifted_visibly_initialized_value);
  __ B(lo, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void InstructionCodeGeneratorARMVIXL::GenerateBitstringTypeCheckCompare(
    HTypeCheckInstruction* check,
    vixl32::Register temp,
    vixl32::FlagsUpdate flags_update) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);
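  // Illustrative example: a mask of 0xffff yields mask_bits == 16, i.e. the
  // superclass bitstring occupies the low 16 bits of the status word and the
  // check reduces to comparing those bits against `path_to_root`.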
7556
7557 // Note that HInstanceOf shall check for zero value in `temp` but HCheckCast needs
7558 // the Z flag for BNE. This is indicated by the `flags_update` parameter.
7559 if (mask_bits == 16u) {
7560 // Load only the bitstring part of the status word.
7561 __ Ldrh(temp, MemOperand(temp, mirror::Class::StatusOffset().Int32Value()));
7562 // Check if the bitstring bits are equal to `path_to_root`.
7563 if (flags_update == SetFlags) {
7564 __ Cmp(temp, path_to_root);
7565 } else {
7566 __ Sub(temp, temp, path_to_root);
7567 }
7568 } else {
7569 // /* uint32_t */ temp = temp->status_
7570 __ Ldr(temp, MemOperand(temp, mirror::Class::StatusOffset().Int32Value()));
7571 if (GetAssembler()->ShifterOperandCanHold(SUB, path_to_root)) {
7572 // Compare the bitstring bits using SUB.
7573 __ Sub(temp, temp, path_to_root);
7574 // Shift out bits that do not contribute to the comparison.
7575 __ Lsl(flags_update, temp, temp, dchecked_integral_cast<uint32_t>(32u - mask_bits));
7576 } else if (IsUint<16>(path_to_root)) {
7577 if (temp.IsLow()) {
7578 // Note: Optimized for size but contains one more dependent instruction than necessary.
7579 // MOVW+SUB(register) would be 8 bytes unless we find a low-reg temporary but the
7580 // macro assembler would use the high reg IP for the constant by default.
7581 // Compare the bitstring bits using SUB.
7582 __ Sub(temp, temp, path_to_root & 0x00ffu); // 16-bit SUB (immediate) T2
7583 __ Sub(temp, temp, path_to_root & 0xff00u); // 32-bit SUB (immediate) T3
7584 // Shift out bits that do not contribute to the comparison.
7585 __ Lsl(flags_update, temp, temp, dchecked_integral_cast<uint32_t>(32u - mask_bits));
7586 } else {
7587 // Extract the bitstring bits.
7588 __ Ubfx(temp, temp, 0, mask_bits);
7589 // Check if the bitstring bits are equal to `path_to_root`.
7590 if (flags_update == SetFlags) {
7591 __ Cmp(temp, path_to_root);
7592 } else {
7593 __ Sub(temp, temp, path_to_root);
7594 }
7595 }
7596 } else {
7597 // Shift out bits that do not contribute to the comparison.
7598 __ Lsl(temp, temp, dchecked_integral_cast<uint32_t>(32u - mask_bits));
7599 // Check if the shifted bitstring bits are equal to `path_to_root << (32u - mask_bits)`.
7600 if (flags_update == SetFlags) {
7601 __ Cmp(temp, path_to_root << (32u - mask_bits));
7602 } else {
7603 __ Sub(temp, temp, path_to_root << (32u - mask_bits));
7604 }
7605 }
7606 }
7607}
7608
Artem Serov02d37832016-10-25 15:25:33 +01007609HLoadString::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadStringKind(
Artem Serovd4cc5b22016-11-04 11:19:09 +00007610 HLoadString::LoadKind desired_string_load_kind) {
7611 switch (desired_string_load_kind) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007612 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00007613 case HLoadString::LoadKind::kBootImageRelRo:
Artem Serovd4cc5b22016-11-04 11:19:09 +00007614 case HLoadString::LoadKind::kBssEntry:
Vladimir Marko695348f2020-05-19 14:42:02 +01007615 DCHECK(!GetCompilerOptions().IsJitCompiler());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007616 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01007617 case HLoadString::LoadKind::kJitBootImageAddress:
Artem Serovd4cc5b22016-11-04 11:19:09 +00007618 case HLoadString::LoadKind::kJitTableAddress:
Vladimir Marko695348f2020-05-19 14:42:02 +01007619 DCHECK(GetCompilerOptions().IsJitCompiler());
Artem Serovc5fcb442016-12-02 19:19:58 +00007620 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007621 case HLoadString::LoadKind::kRuntimeCall:
Artem Serovd4cc5b22016-11-04 11:19:09 +00007622 break;
7623 }
7624 return desired_string_load_kind;
Artem Serov02d37832016-10-25 15:25:33 +01007625}
7626
7627void LocationsBuilderARMVIXL::VisitLoadString(HLoadString* load) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007628 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01007629 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Artem Serov02d37832016-10-25 15:25:33 +01007630 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007631 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Artem Serov02d37832016-10-25 15:25:33 +01007632 locations->SetOut(LocationFrom(r0));
7633 } else {
7634 locations->SetOut(Location::RequiresRegister());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007635 if (load_kind == HLoadString::LoadKind::kBssEntry) {
7636 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00007637 // Rely on the pResolveString and marking to save everything we need, including temps.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01007638 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007639 } else {
7640 // For non-Baker read barrier we have a temp-clobbering call.
7641 }
7642 }
Artem Serov02d37832016-10-25 15:25:33 +01007643 }
7644}
7645
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007646// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7647// move.
7648void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007649 LocationSummary* locations = load->GetLocations();
7650 Location out_loc = locations->Out();
7651 vixl32::Register out = OutputRegister(load);
7652 HLoadString::LoadKind load_kind = load->GetLoadKind();
7653
7654 switch (load_kind) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007655 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko44ca0752019-07-29 10:18:25 +01007656 DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
7657 codegen_->GetCompilerOptions().IsBootImageExtension());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007658 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007659 codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007660 codegen_->EmitMovwMovtPlaceholder(labels, out);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007661 return;
Artem Serovd4cc5b22016-11-04 11:19:09 +00007662 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00007663 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007664 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
7665 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
Vladimir Markode91ca92020-10-27 13:41:40 +00007666 codegen_->NewBootImageRelRoPatch(CodeGenerator::GetBootImageOffset(load));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007667 codegen_->EmitMovwMovtPlaceholder(labels, out);
Andreas Gampe3db70682018-12-26 15:12:03 -08007668 __ Ldr(out, MemOperand(out, /* offset= */ 0));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007669 return;
Artem Serovd4cc5b22016-11-04 11:19:09 +00007670 }
7671 case HLoadString::LoadKind::kBssEntry: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007672 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007673 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
Vladimir Markof3c52b42017-11-17 17:32:12 +00007674 codegen_->EmitMovwMovtPlaceholder(labels, out);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01007675 // All aligned loads are implicitly atomic consume operations on ARM.
Vladimir Markoca1e0382018-04-11 09:58:41 +00007676 codegen_->GenerateGcRootFieldLoad(
Andreas Gampe3db70682018-12-26 15:12:03 -08007677 load, out_loc, out, /* offset= */ 0, kCompilerReadBarrierOption);
Artem Serovd4cc5b22016-11-04 11:19:09 +00007678 LoadStringSlowPathARMVIXL* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007679 new (codegen_->GetScopedAllocator()) LoadStringSlowPathARMVIXL(load);
Artem Serovd4cc5b22016-11-04 11:19:09 +00007680 codegen_->AddSlowPath(slow_path);
7681 __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
7682 __ Bind(slow_path->GetExitLabel());
Andra Danciua0130e82020-07-23 12:34:56 +00007683 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 17);
Artem Serovd4cc5b22016-11-04 11:19:09 +00007684 return;
7685 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01007686 case HLoadString::LoadKind::kJitBootImageAddress: {
7687 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
7688 DCHECK_NE(address, 0u);
7689 __ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
7690 return;
7691 }
Artem Serovd4cc5b22016-11-04 11:19:09 +00007692 case HLoadString::LoadKind::kJitTableAddress: {
Artem Serovc5fcb442016-12-02 19:19:58 +00007693 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007694 load->GetStringIndex(),
7695 load->GetString()));
Artem Serovc5fcb442016-12-02 19:19:58 +00007696 // /* GcRoot<mirror::String> */ out = *out
Vladimir Markoca1e0382018-04-11 09:58:41 +00007697 codegen_->GenerateGcRootFieldLoad(
Andreas Gampe3db70682018-12-26 15:12:03 -08007698 load, out_loc, out, /* offset= */ 0, kCompilerReadBarrierOption);
Artem Serovc5fcb442016-12-02 19:19:58 +00007699 return;
Artem Serovd4cc5b22016-11-04 11:19:09 +00007700 }
7701 default:
7702 break;
7703 }
Artem Serov02d37832016-10-25 15:25:33 +01007704
7705 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007706 DCHECK_EQ(load->GetLoadKind(), HLoadString::LoadKind::kRuntimeCall);
Artem Serov02d37832016-10-25 15:25:33 +01007707 InvokeRuntimeCallingConventionARMVIXL calling_convention;
Andreas Gampe8a0128a2016-11-28 07:38:35 -08007708 __ Mov(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
Artem Serov02d37832016-10-25 15:25:33 +01007709 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
7710 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Andra Danciua0130e82020-07-23 12:34:56 +00007711 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 18);
Artem Serov02d37832016-10-25 15:25:33 +01007712}
7713
7714static int32_t GetExceptionTlsOffset() {
7715 return Thread::ExceptionOffset<kArmPointerSize>().Int32Value();
7716}
7717
7718void LocationsBuilderARMVIXL::VisitLoadException(HLoadException* load) {
7719 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007720 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01007721 locations->SetOut(Location::RequiresRegister());
7722}
7723
7724void InstructionCodeGeneratorARMVIXL::VisitLoadException(HLoadException* load) {
7725 vixl32::Register out = OutputRegister(load);
7726 GetAssembler()->LoadFromOffset(kLoadWord, out, tr, GetExceptionTlsOffset());
7727}
7728
7729
7730void LocationsBuilderARMVIXL::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007731 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01007732}
7733
7734void InstructionCodeGeneratorARMVIXL::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
7735 UseScratchRegisterScope temps(GetVIXLAssembler());
7736 vixl32::Register temp = temps.Acquire();
7737 __ Mov(temp, 0);
7738 GetAssembler()->StoreToOffset(kStoreWord, temp, tr, GetExceptionTlsOffset());
7739}

void LocationsBuilderARMVIXL::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARMVIXL::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

// Temp is used for read barrier.
static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
  if (kEmitCompilerReadBarrier &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
    return 1;
  }
  return 0;
}

// Interface case has 3 temps, one for holding the number of interfaces, one for the current
// interface pointer, one for loading the current interface.
// The other checks have one temp for loading the object's class.
static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
  if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
    return 3;
  }
  return 1 + NumberOfInstanceOfTemps(type_check_kind);
}
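
// Worked example (illustrative; depends on build flags): with Baker read
// barriers enabled, NumberOfInstanceOfTemps(kExactCheck) == 1, so
// NumberOfCheckCastTemps(kExactCheck) == 2, while
// NumberOfCheckCastTemps(kInterfaceCheck) is always 3 regardless of the
// read barrier configuration. Without read barriers, the instance-of
// helpers need no temp and the check-cast ones need only the class load.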

void LocationsBuilderARMVIXL::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARMVIXL uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}

void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  vixl32::Register obj = InputRegisterAt(instruction, 0);
  vixl32::Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
      ? vixl32::Register()
      : InputRegisterAt(instruction, 1);
  Location out_loc = locations->Out();
  vixl32::Register out = OutputRegister(instruction);
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  vixl32::Label done;
  vixl32::Label* const final_label = codegen_->GetFinalLabel(instruction, &done);
  SlowPathCodeARMVIXL* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid the null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    DCHECK(!out.Is(obj));
    __ Mov(out, 0);
    __ CompareAndBranchIfZero(obj, final_label, /* is_far_target= */ false);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Classes must be equal for the instanceof to succeed.
      __ Cmp(out, cls);
      // We speculatively set the result to false without changing the condition
      // flags, which allows us to avoid some branching later.
      __ Mov(LeaveFlags, out, 0);

      // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
      // we check that the output is in a low register, so that a 16-bit MOV
      // encoding can be used.
      if (out.IsLow()) {
        // We use the scope because of the IT block that follows.
        ExactAssemblyScope guard(GetVIXLAssembler(),
                                 2 * vixl32::k16BitT32InstructionSizeInBytes,
                                 CodeBufferCheckScope::kExactSize);

        __ it(eq);
        __ mov(eq, out, 1);
      } else {
        __ B(ne, final_label, /* is_far_target= */ false);
        __ Mov(out, 1);
      }

      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl32::Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to the final label.
      __ CompareAndBranchIfZero(out, final_label, /* is_far_target= */ false);
      __ Cmp(out, cls);
      __ B(ne, &loop, /* is_far_target= */ false);
      __ Mov(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      vixl32::Label loop, success;
      __ Bind(&loop);
      __ Cmp(out, cls);
      __ B(eq, &success, /* is_far_target= */ false);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // This is essentially a null check, but it sets the condition flags to the
      // proper value for the code that follows the loop, i.e. not `eq`.
      __ Cmp(out, 1);
      __ B(hs, &loop, /* is_far_target= */ false);

      // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
      // we check that the output is in a low register, so that a 16-bit MOV
      // encoding can be used.
      if (out.IsLow()) {
        // If `out` is null, we use it for the result, and the condition flags
        // have already been set to `ne`, so the IT block that comes afterwards
        // (and which handles the successful case) turns into a NOP (instead of
        // overwriting `out`).
        __ Bind(&success);

        // We use the scope because of the IT block that follows.
        ExactAssemblyScope guard(GetVIXLAssembler(),
                                 2 * vixl32::k16BitT32InstructionSizeInBytes,
                                 CodeBufferCheckScope::kExactSize);

        // There is only one branch to the `success` label (which is bound to this
        // IT block), and it has the same condition, `eq`, so in that case the MOV
        // is executed.
        __ it(eq);
        __ mov(eq, out, 1);
      } else {
        // If `out` is null, we use it for the result, and jump to the final label.
        __ B(final_label);
        __ Bind(&success);
        __ Mov(out, 1);
      }

      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Do an exact check.
      vixl32::Label exact_check;
      __ Cmp(out, cls);
      __ B(eq, &exact_check, /* is_far_target= */ false);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to the final label.
      __ CompareAndBranchIfZero(out, final_label, /* is_far_target= */ false);
      GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cmp(out, 0);
      // We speculatively set the result to false without changing the condition
      // flags, which allows us to avoid some branching later.
      __ Mov(LeaveFlags, out, 0);

      // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
      // we check that the output is in a low register, so that a 16-bit MOV
      // encoding can be used.
      if (out.IsLow()) {
        __ Bind(&exact_check);

        // We use the scope because of the IT block that follows.
        ExactAssemblyScope guard(GetVIXLAssembler(),
                                 2 * vixl32::k16BitT32InstructionSizeInBytes,
                                 CodeBufferCheckScope::kExactSize);

        __ it(eq);
        __ mov(eq, out, 1);
      } else {
        __ B(ne, final_label, /* is_far_target= */ false);
        __ Bind(&exact_check);
        __ Mov(out, 1);
      }

      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      __ Cmp(out, cls);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARMVIXL(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(ne, slow_path->GetEntryLabel());
      __ Mov(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require assigning fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARMVIXL(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, out, DontCare);
      // If `out` is a low reg and we would have another low reg temp, we could
      // optimize this as RSBS+ADC, see GenerateConditionWithZero().
      //
      // Also, in some cases when `out` is a low reg and we're loading a constant to IP
      // it would make sense to use CMP+MOV+IT+MOV instead of SUB+CLZ+LSR as the code size
      // would be the same and we would have fewer direct data dependencies.
      codegen_->GenerateConditionWithZero(kCondEQ, out, out);  // CLZ+LSR
      break;
    }
  }

  if (done.IsReferenced()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
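
// Illustrative sketch (editorial): for kExactCheck with `out` in a low
// register, and ignoring read barriers and heap-reference poisoning, the
// emitted fast path above is roughly:
//
//   ldr   out, [obj, #class_offset]  ; out = obj->klass_
//   cmp   out, cls
//   mov   out, #0                    ; speculative false, flags preserved
//   it    eq
//   moveq out, #1                    ; 16-bit encoding, hence the low-reg test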

void LocationsBuilderARMVIXL::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
}

void InstructionCodeGeneratorARMVIXL::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  vixl32::Register obj = InputRegisterAt(instruction, 0);
  vixl32::Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
      ? vixl32::Register()
      : InputRegisterAt(instruction, 1);
  Location temp_loc = locations->GetTemp(0);
  vixl32::Register temp = RegisterFrom(temp_loc);
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 3u);
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
  SlowPathCodeARMVIXL* type_check_slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARMVIXL(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl32::Label done;
  vixl32::Label* final_label = codegen_->GetFinalLabel(instruction, &done);
  // Avoid the null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ CompareAndBranchIfZero(obj, final_label, /* is_far_target= */ false);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl32::Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ CompareAndBranchIfZero(temp, type_check_slow_path->GetEntryLabel());

      // Otherwise, compare the classes.
      __ Cmp(temp, cls);
      __ B(ne, &loop, /* is_far_target= */ false);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Walk over the class hierarchy to find a match.
      vixl32::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, final_label, /* is_far_target= */ false);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ CompareAndBranchIfZero(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, jump to the beginning of the loop.
      __ B(&loop);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Do an exact check.
      __ Cmp(temp, cls);
      __ B(eq, final_label, /* is_far_target= */ false);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ CompareAndBranchIfZero(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array; further check that its component type is
      // not a primitive type.
      GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require assigning
      // fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.

      __ B(type_check_slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve the performance of the fast path. We cannot get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Ldr(RegisterFrom(maybe_temp2_loc), MemOperand(temp, array_length_offset));
      // Loop through the iftable and check if any class matches.
      vixl32::Label start_loop;
      __ Bind(&start_loop);
      __ CompareAndBranchIfZero(RegisterFrom(maybe_temp2_loc),
                                type_check_slow_path->GetEntryLabel());
      __ Ldr(RegisterFrom(maybe_temp3_loc), MemOperand(temp, object_array_data_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(RegisterFrom(maybe_temp3_loc));
      // Go to next interface.
      __ Add(temp, temp, Operand::From(2 * kHeapReferenceSize));
      __ Sub(RegisterFrom(maybe_temp2_loc), RegisterFrom(maybe_temp2_loc), 2);
      // Compare the classes and continue the loop if they do not match.
      __ Cmp(cls, RegisterFrom(maybe_temp3_loc));
      __ B(ne, &start_loop, /* is_far_target= */ false);
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, temp, SetFlags);
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }
  }
  if (done.IsReferenced()) {
    __ Bind(&done);
  }

  __ Bind(type_check_slow_path->GetExitLabel());
}
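
// Illustrative sketch (editorial): the kInterfaceCheck fast path above,
// with t2 = maybe_temp2 (remaining iftable length) and t3 = maybe_temp3
// (candidate interface class). IfTable entries come in pairs (interface
// class, method array), hence the stride of 2 references per iteration:
//
//   ldr t2, [temp, #array_length_offset]
// start_loop:
//   cbz t2, slow_path                        ; iftable exhausted -> throw
//   ldr t3, [temp, #object_array_data_offset]
//   add temp, temp, #2 * kHeapReferenceSize  ; advance to next pair
//   sub t2, t2, #2
//   cmp cls, t3
//   bne start_loop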

void LocationsBuilderARMVIXL::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARMVIXL::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
                          instruction,
                          instruction->GetDexPc());
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 19);
}

void LocationsBuilderARMVIXL::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction, AND);
}

void LocationsBuilderARMVIXL::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction, ORR);
}

void LocationsBuilderARMVIXL::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction, EOR);
}

void LocationsBuilderARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction, Opcode opcode) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  DCHECK(instruction->GetResultType() == DataType::Type::kInt32
         || instruction->GetResultType() == DataType::Type::kInt64);
  // Note: GVN reorders commutative operations to have the constant on the right hand side.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ArmEncodableConstantOrRegister(instruction->InputAt(1), opcode));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARMVIXL::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARMVIXL::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}

void LocationsBuilderARMVIXL::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  DCHECK(instruction->GetResultType() == DataType::Type::kInt32
         || instruction->GetResultType() == DataType::Type::kInt64);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    vixl32::Register first_reg = RegisterFrom(first);
    vixl32::Register second_reg = RegisterFrom(second);
    vixl32::Register out_reg = RegisterFrom(out);

    switch (instruction->GetOpKind()) {
      case HInstruction::kAnd:
        __ Bic(out_reg, first_reg, second_reg);
        break;
      case HInstruction::kOr:
        __ Orn(out_reg, first_reg, second_reg);
        break;
      // There is no EON on arm.
      case HInstruction::kXor:
      default:
        LOG(FATAL) << "Unexpected instruction " << instruction->DebugName();
        UNREACHABLE();
    }
    return;

  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    vixl32::Register first_low = LowRegisterFrom(first);
    vixl32::Register first_high = HighRegisterFrom(first);
    vixl32::Register second_low = LowRegisterFrom(second);
    vixl32::Register second_high = HighRegisterFrom(second);
    vixl32::Register out_low = LowRegisterFrom(out);
    vixl32::Register out_high = HighRegisterFrom(out);

    switch (instruction->GetOpKind()) {
      case HInstruction::kAnd:
        __ Bic(out_low, first_low, second_low);
        __ Bic(out_high, first_high, second_high);
        break;
      case HInstruction::kOr:
        __ Orn(out_low, first_low, second_low);
        __ Orn(out_high, first_high, second_high);
        break;
      // There is no EON on arm.
      case HInstruction::kXor:
      default:
        LOG(FATAL) << "Unexpected instruction " << instruction->DebugName();
        UNREACHABLE();
    }
  }
}
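
// Illustrative note (editorial): HBitwiseNegatedRight folds a NOT of the
// right operand into the operation itself, so a 64-bit `a & ~b` becomes
//
//   bic out_lo, a_lo, b_lo
//   bic out_hi, a_hi, b_hi
//
// rather than two MVNs followed by two ANDs. There is no EON
// (exclusive-OR-NOT) in the ARM/Thumb-2 instruction set, which is why the
// kXor arm of the switch above is unreachable.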

void LocationsBuilderARMVIXL::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
         instruction->GetType() == DataType::Type::kInt64);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  const bool overlap = instruction->GetType() == DataType::Type::kInt64 &&
                       HDataProcWithShifterOp::IsExtensionOp(instruction->GetOpKind());

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* instruction) {
  const LocationSummary* const locations = instruction->GetLocations();
  const HInstruction::InstructionKind kind = instruction->GetInstrKind();
  const HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();

  if (instruction->GetType() == DataType::Type::kInt32) {
    const vixl32::Register first = InputRegisterAt(instruction, 0);
    const vixl32::Register output = OutputRegister(instruction);
    const vixl32::Register second = instruction->InputAt(1)->GetType() == DataType::Type::kInt64
        ? LowRegisterFrom(locations->InAt(1))
        : InputRegisterAt(instruction, 1);

    if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
      DCHECK_EQ(kind, HInstruction::kAdd);

      switch (op_kind) {
        case HDataProcWithShifterOp::kUXTB:
          __ Uxtab(output, first, second);
          break;
        case HDataProcWithShifterOp::kUXTH:
          __ Uxtah(output, first, second);
          break;
        case HDataProcWithShifterOp::kSXTB:
          __ Sxtab(output, first, second);
          break;
        case HDataProcWithShifterOp::kSXTH:
          __ Sxtah(output, first, second);
          break;
        default:
          LOG(FATAL) << "Unexpected operation kind: " << op_kind;
          UNREACHABLE();
      }
    } else {
      GenerateDataProcInstruction(kind,
                                  output,
                                  first,
                                  Operand(second,
                                          ShiftFromOpKind(op_kind),
                                          instruction->GetShiftAmount()),
                                  codegen_);
    }
  } else {
    DCHECK_EQ(instruction->GetType(), DataType::Type::kInt64);

    if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
      const vixl32::Register second = InputRegisterAt(instruction, 1);

      DCHECK(!LowRegisterFrom(locations->Out()).Is(second));
      GenerateDataProc(kind,
                       locations->Out(),
                       locations->InAt(0),
                       second,
                       Operand(second, ShiftType::ASR, 31),
                       codegen_);
    } else {
      GenerateLongDataProc(instruction, codegen_);
    }
  }
}
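
// Illustrative sketch (editorial): HDataProcWithShifterOp fuses a shift or
// an extension into the data-processing instruction. For an Int32
// `a + (b << 3)` the visitor emits a single
//
//   add output, first, second, lsl #3
//
// and for an add of a zero-extended byte it uses the dedicated
// extend-and-add form
//
//   uxtab output, first, second   ; output = first + (second & 0xff)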

// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
void InstructionCodeGeneratorARMVIXL::GenerateAndConst(vixl32::Register out,
                                                       vixl32::Register first,
                                                       uint32_t value) {
  // Optimize special cases for individual halves of `and-long` (`and` is simplified earlier).
  if (value == 0xffffffffu) {
    if (!out.Is(first)) {
      __ Mov(out, first);
    }
    return;
  }
  if (value == 0u) {
    __ Mov(out, 0);
    return;
  }
  if (GetAssembler()->ShifterOperandCanHold(AND, value)) {
    __ And(out, first, value);
  } else if (GetAssembler()->ShifterOperandCanHold(BIC, ~value)) {
    __ Bic(out, first, ~value);
  } else {
    DCHECK(IsPowerOfTwo(value + 1));
    __ Ubfx(out, first, 0, WhichPowerOf2(value + 1));
  }
}
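
// Worked example (illustrative): for value == 0xffff, neither AND 0x0000ffff
// nor BIC 0xffff0000 fits a Thumb-2 modified-immediate encoding (a 16-bit
// run of ones is too long for the rotated 8-bit pattern), but
// value + 1 == 2^16, so the final branch emits
//
//   ubfx out, first, #0, #16
//
// extracting the low 16 bits, which is exactly the AND.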

// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
void InstructionCodeGeneratorARMVIXL::GenerateOrrConst(vixl32::Register out,
                                                       vixl32::Register first,
                                                       uint32_t value) {
  // Optimize special cases for individual halves of `or-long` (`or` is simplified earlier).
  if (value == 0u) {
    if (!out.Is(first)) {
      __ Mov(out, first);
    }
    return;
  }
  if (value == 0xffffffffu) {
    __ Mvn(out, 0);
    return;
  }
  if (GetAssembler()->ShifterOperandCanHold(ORR, value)) {
    __ Orr(out, first, value);
  } else {
    DCHECK(GetAssembler()->ShifterOperandCanHold(ORN, ~value));
    __ Orn(out, first, ~value);
  }
}

// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
void InstructionCodeGeneratorARMVIXL::GenerateEorConst(vixl32::Register out,
                                                       vixl32::Register first,
                                                       uint32_t value) {
  // Optimize special case for individual halves of `xor-long` (`xor` is simplified earlier).
  if (value == 0u) {
    if (!out.Is(first)) {
      __ Mov(out, first);
    }
    return;
  }
  __ Eor(out, first, value);
}

void InstructionCodeGeneratorARMVIXL::GenerateAddLongConst(Location out,
                                                           Location first,
                                                           uint64_t value) {
  vixl32::Register out_low = LowRegisterFrom(out);
  vixl32::Register out_high = HighRegisterFrom(out);
  vixl32::Register first_low = LowRegisterFrom(first);
  vixl32::Register first_high = HighRegisterFrom(first);
  uint32_t value_low = Low32Bits(value);
  uint32_t value_high = High32Bits(value);
  if (value_low == 0u) {
    if (!out_low.Is(first_low)) {
      __ Mov(out_low, first_low);
    }
    __ Add(out_high, first_high, value_high);
    return;
  }
  __ Adds(out_low, first_low, value_low);
  if (GetAssembler()->ShifterOperandCanHold(ADC, value_high)) {
    __ Adc(out_high, first_high, value_high);
  } else {
    DCHECK(GetAssembler()->ShifterOperandCanHold(SBC, ~value_high));
    __ Sbc(out_high, first_high, ~value_high);
  }
}
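
// Worked example (illustrative): adding 0x0000000100000000 takes the
// value_low == 0 fast path, so only a plain ADD on the high word is needed
// and the carry chain is skipped entirely. When value_high is not encodable
// for ADC, the SBC fallback works because SBC computes
// rn - op - 1 + carry == rn + ~op + carry; with op == ~value_high that is
// rn + value_high + carry, i.e. exactly the ADC that could not be encoded.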

void InstructionCodeGeneratorARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  if (second.IsConstant()) {
    uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
    uint32_t value_low = Low32Bits(value);
    if (instruction->GetResultType() == DataType::Type::kInt32) {
      vixl32::Register first_reg = InputRegisterAt(instruction, 0);
      vixl32::Register out_reg = OutputRegister(instruction);
      if (instruction->IsAnd()) {
        GenerateAndConst(out_reg, first_reg, value_low);
      } else if (instruction->IsOr()) {
        GenerateOrrConst(out_reg, first_reg, value_low);
      } else {
        DCHECK(instruction->IsXor());
        GenerateEorConst(out_reg, first_reg, value_low);
      }
    } else {
      DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
      uint32_t value_high = High32Bits(value);
      vixl32::Register first_low = LowRegisterFrom(first);
      vixl32::Register first_high = HighRegisterFrom(first);
      vixl32::Register out_low = LowRegisterFrom(out);
      vixl32::Register out_high = HighRegisterFrom(out);
      if (instruction->IsAnd()) {
        GenerateAndConst(out_low, first_low, value_low);
        GenerateAndConst(out_high, first_high, value_high);
      } else if (instruction->IsOr()) {
        GenerateOrrConst(out_low, first_low, value_low);
        GenerateOrrConst(out_high, first_high, value_high);
      } else {
        DCHECK(instruction->IsXor());
        GenerateEorConst(out_low, first_low, value_low);
        GenerateEorConst(out_high, first_high, value_high);
      }
    }
    return;
  }

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    vixl32::Register first_reg = InputRegisterAt(instruction, 0);
    vixl32::Register second_reg = InputRegisterAt(instruction, 1);
    vixl32::Register out_reg = OutputRegister(instruction);
    if (instruction->IsAnd()) {
      __ And(out_reg, first_reg, second_reg);
    } else if (instruction->IsOr()) {
      __ Orr(out_reg, first_reg, second_reg);
    } else {
      DCHECK(instruction->IsXor());
      __ Eor(out_reg, first_reg, second_reg);
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    vixl32::Register first_low = LowRegisterFrom(first);
    vixl32::Register first_high = HighRegisterFrom(first);
    vixl32::Register second_low = LowRegisterFrom(second);
    vixl32::Register second_high = HighRegisterFrom(second);
    vixl32::Register out_low = LowRegisterFrom(out);
    vixl32::Register out_high = HighRegisterFrom(out);
    if (instruction->IsAnd()) {
      __ And(out_low, first_low, second_low);
      __ And(out_high, first_high, second_high);
    } else if (instruction->IsOr()) {
      __ Orr(out_low, first_low, second_low);
      __ Orr(out_high, first_high, second_high);
    } else {
      DCHECK(instruction->IsXor());
      __ Eor(out_low, first_low, second_low);
      __ Eor(out_high, first_high, second_high);
    }
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  vixl32::Register out_reg = RegisterFrom(out);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, maybe_temp, /* needs_null_check= */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Mov(RegisterFrom(maybe_temp), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      GetAssembler()->LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    GetAssembler()->LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  vixl32::Register out_reg = RegisterFrom(out);
  vixl32::Register obj_reg = RegisterFrom(obj);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check= */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      GetAssembler()->LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    GetAssembler()->LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}
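
// Illustrative note (editorial): the two helpers above differ only in
// whether the base of the load is the destination itself ("one register",
// used when chasing super_class_/component_type_ chains in place) or a
// separate object register ("two registers", used for the initial
// obj->klass_ load). In the no-read-barrier configuration both reduce to a
// single
//
//   ldr out, [base, #offset]
//
// optionally followed by heap-reference unpoisoning.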

void CodeGeneratorARMVIXL::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    vixl32::Register obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  vixl32::Register root_reg = RegisterFrom(root);
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used.

      // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
      // the Marking Register) to decide whether we need to enter
      // the slow path to mark the GC root.
      //
      // We use shared thunks for the slow path; shared within the method
      // for JIT, across methods for AOT. That thunk checks the reference
      // and jumps to the entrypoint if needed.
      //
      //   lr = &return_address;
      //   GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
      //   if (mr) {  // Thread::Current()->GetIsGcMarking()
      //     goto gc_root_thunk<root_reg>(lr)
      //   }
      // return_address:

      UseScratchRegisterScope temps(GetVIXLAssembler());
      temps.Exclude(ip);
      bool narrow = CanEmitNarrowLdr(root_reg, obj, offset);
      uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode(), narrow);

      size_t narrow_instructions = /* CMP */ (mr.IsLow() ? 1u : 0u) + /* LDR */ (narrow ? 1u : 0u);
      size_t wide_instructions = /* ADR+CMP+LDR+BNE */ 4u - narrow_instructions;
      size_t exact_size = wide_instructions * vixl32::k32BitT32InstructionSizeInBytes +
                          narrow_instructions * vixl32::k16BitT32InstructionSizeInBytes;
      ExactAssemblyScope guard(GetVIXLAssembler(), exact_size);
      vixl32::Label return_address;
      EmitAdrCode adr(GetVIXLAssembler(), lr, &return_address);
      __ cmp(mr, Operand(0));
      // Currently the offset is always within range. If that changes,
      // we shall have to split the load the same way as for fields.
      DCHECK_LT(offset, kReferenceLoadMinFarOffset);
      ptrdiff_t old_offset = GetVIXLAssembler()->GetBuffer()->GetCursorOffset();
      __ ldr(EncodingSize(narrow ? Narrow : Wide), root_reg, MemOperand(obj, offset));
      EmitBakerReadBarrierBne(custom_data);
      __ bind(&return_address);
      DCHECK_EQ(old_offset - GetVIXLAssembler()->GetBuffer()->GetCursorOffset(),
                narrow ? BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_NARROW_OFFSET
                       : BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_WIDE_OFFSET);
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Add(root_reg, obj, offset);
      // /* mirror::Object* */ root = root->Read()
      GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    GetAssembler()->LoadFromOffset(kLoadWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
  MaybeGenerateMarkingRegisterCheck(/* code= */ 20);
}
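
// Illustrative note (editorial): the DCHECK_EQ above pins the distance
// between the LDR and `return_address` to a constant the thunk relies on.
// Roughly, the slow-path thunk is entered with lr pointing at
// `return_address` and introspects the LDR it finds at a fixed negative
// offset from lr to recover the root register and field offset, so the
// exact instruction sizes enforced by ExactAssemblyScope are load-bearing.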
8814
Vladimir Marko3d350a82020-11-18 14:14:27 +00008815void CodeGeneratorARMVIXL::GenerateIntrinsicCasMoveWithBakerReadBarrier(
8816 vixl::aarch32::Register marked_old_value,
8817 vixl::aarch32::Register old_value) {
Vladimir Markod887ed82018-08-14 13:52:12 +00008818 DCHECK(kEmitCompilerReadBarrier);
8819 DCHECK(kUseBakerReadBarrier);
8820
Vladimir Marko3d350a82020-11-18 14:14:27 +00008821 // Similar to the Baker RB path in GenerateGcRootFieldLoad(), with a MOV instead of LDR.
8822 // For low registers, we can reuse the GC root narrow entrypoint, for high registers
8823 // we use a specialized entrypoint because the register bits are 8-11 instead of 12-15.
8824 bool narrow_mov = marked_old_value.IsLow();
8825 uint32_t custom_data = narrow_mov
8826 ? EncodeBakerReadBarrierGcRootData(marked_old_value.GetCode(), /*narrow=*/ true)
8827 : EncodeBakerReadBarrierIntrinsicCasData(marked_old_value.GetCode());
Vladimir Markod887ed82018-08-14 13:52:12 +00008828
Vladimir Marko3d350a82020-11-18 14:14:27 +00008829 size_t narrow_instructions = /* CMP */ (mr.IsLow() ? 1u : 0u) + /* MOV */ (narrow_mov ? 1u : 0u);
8830 size_t wide_instructions = /* ADR+CMP+MOV+BNE */ 4u - narrow_instructions;
Vladimir Markod887ed82018-08-14 13:52:12 +00008831 size_t exact_size = wide_instructions * vixl32::k32BitT32InstructionSizeInBytes +
8832 narrow_instructions * vixl32::k16BitT32InstructionSizeInBytes;
8833 ExactAssemblyScope guard(GetVIXLAssembler(), exact_size);
8834 vixl32::Label return_address;
8835 EmitAdrCode adr(GetVIXLAssembler(), lr, &return_address);
8836 __ cmp(mr, Operand(0));
8837 ptrdiff_t old_offset = GetVIXLAssembler()->GetBuffer()->GetCursorOffset();
Vladimir Marko3d350a82020-11-18 14:14:27 +00008838 __ mov(EncodingSize(narrow_mov ? Narrow : Wide), marked_old_value, old_value);
Vladimir Markod887ed82018-08-14 13:52:12 +00008839 EmitBakerReadBarrierBne(custom_data);
8840 __ bind(&return_address);
8841 DCHECK_EQ(old_offset - GetVIXLAssembler()->GetBuffer()->GetCursorOffset(),
Vladimir Marko3d350a82020-11-18 14:14:27 +00008842 narrow_mov
8843 ? BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_NARROW_OFFSET
8844 : BAKER_MARK_INTROSPECTION_INTRINSIC_CAS_MOV_OFFSET);
Vladimir Markod887ed82018-08-14 13:52:12 +00008845}
8846
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008847void CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
8848 Location ref,
8849 vixl32::Register obj,
Vladimir Marko248141f2018-08-10 10:40:07 +01008850 const vixl32::MemOperand& src,
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008851 bool needs_null_check) {
8852 DCHECK(kEmitCompilerReadBarrier);
8853 DCHECK(kUseBakerReadBarrier);
8854
Vladimir Marko008e09f32018-08-06 15:42:43 +01008855 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
8856 // Marking Register) to decide whether we need to enter the slow
8857 // path to mark the reference. Then, in the slow path, check the
8858 // gray bit in the lock word of the reference's holder (`obj`) to
8859 // decide whether to mark `ref` or not.
8860 //
8861 // We use shared thunks for the slow path; shared within the method
8862 // for JIT, across methods for AOT. That thunk checks the holder
8863 // and jumps to the entrypoint if needed. If the holder is not gray,
8864 // it creates a fake dependency and returns to the LDR instruction.
8865 //
8866 // lr = &gray_return_address;
8867 // if (mr) { // Thread::Current()->GetIsGcMarking()
8868 // goto field_thunk<holder_reg, base_reg>(lr)
8869 // }
8870 // not_gray_return_address:
8871 // // Original reference load. If the offset is too large to fit
8872 // // into LDR, we use an adjusted base register here.
8873 // HeapReference<mirror::Object> reference = *(obj+offset);
8874 // gray_return_address:
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008875
Vladimir Marko248141f2018-08-10 10:40:07 +01008876 DCHECK(src.GetAddrMode() == vixl32::Offset);
8877 DCHECK_ALIGNED(src.GetOffsetImmediate(), sizeof(mirror::HeapReference<mirror::Object>));
Vladimir Marko008e09f32018-08-06 15:42:43 +01008878 vixl32::Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
Vladimir Marko248141f2018-08-10 10:40:07 +01008879 bool narrow = CanEmitNarrowLdr(ref_reg, src.GetBaseRegister(), src.GetOffsetImmediate());
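  // Context (assumption about the 16-bit T1 LDR encoding): a narrow load needs
  // both `ref_reg` and the base to be low registers and a small word-aligned
  // immediate offset, which is what CanEmitNarrowLdr() checks for.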
8880
Vladimir Marko008e09f32018-08-06 15:42:43 +01008881 UseScratchRegisterScope temps(GetVIXLAssembler());
8882 temps.Exclude(ip);
Vladimir Marko248141f2018-08-10 10:40:07 +01008883 uint32_t custom_data =
8884 EncodeBakerReadBarrierFieldData(src.GetBaseRegister().GetCode(), obj.GetCode(), narrow);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008885
Vladimir Marko008e09f32018-08-06 15:42:43 +01008886 {
Vladimir Markod887ed82018-08-14 13:52:12 +00008887 size_t narrow_instructions =
8888 /* CMP */ (mr.IsLow() ? 1u : 0u) +
8889 /* LDR+unpoison? */ (narrow ? (kPoisonHeapReferences ? 2u : 1u) : 0u);
8890 size_t wide_instructions =
8891 /* ADR+CMP+LDR+BNE+unpoison? */ (kPoisonHeapReferences ? 5u : 4u) - narrow_instructions;
8892 size_t exact_size = wide_instructions * vixl32::k32BitT32InstructionSizeInBytes +
8893 narrow_instructions * vixl32::k16BitT32InstructionSizeInBytes;
8894 ExactAssemblyScope guard(GetVIXLAssembler(), exact_size);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008895 vixl32::Label return_address;
8896 EmitAdrCode adr(GetVIXLAssembler(), lr, &return_address);
8897 __ cmp(mr, Operand(0));
8898 EmitBakerReadBarrierBne(custom_data);
8899 ptrdiff_t old_offset = GetVIXLAssembler()->GetBuffer()->GetCursorOffset();
Vladimir Marko248141f2018-08-10 10:40:07 +01008900 __ ldr(EncodingSize(narrow ? Narrow : Wide), ref_reg, src);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008901 if (needs_null_check) {
8902 MaybeRecordImplicitNullCheck(instruction);
8903 }
8904 // Note: We need a specific width for the unpoisoning NEG.
8905 if (kPoisonHeapReferences) {
8906 if (narrow) {
8907        // The only 16-bit encoding is T1, which sets flags outside an IT block (i.e. RSBS, not RSB).
8908 __ rsbs(EncodingSize(Narrow), ref_reg, ref_reg, Operand(0));
8909 } else {
8910 __ rsb(EncodingSize(Wide), ref_reg, ref_reg, Operand(0));
8911 }
8912 }
Vladimir Markod887ed82018-08-14 13:52:12 +00008913 __ bind(&return_address);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008914 DCHECK_EQ(old_offset - GetVIXLAssembler()->GetBuffer()->GetCursorOffset(),
8915 narrow ? BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_OFFSET
8916 : BAKER_MARK_INTROSPECTION_FIELD_LDR_WIDE_OFFSET);
8917 }
Andra Danciua0130e82020-07-23 12:34:56 +00008918 MaybeGenerateMarkingRegisterCheck(/* code= */ 21, /* temp_loc= */ LocationFrom(ip));
Roland Levillain6070e882016-11-03 17:51:58 +00008919}
8920
Vladimir Marko248141f2018-08-10 10:40:07 +01008921void CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
8922 Location ref,
8923 vixl32::Register obj,
8924 uint32_t offset,
Vladimir Marko01b65522020-10-28 15:43:54 +00008925 Location maybe_temp,
Vladimir Marko248141f2018-08-10 10:40:07 +01008926 bool needs_null_check) {
8927 DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
8928 vixl32::Register base = obj;
8929 if (offset >= kReferenceLoadMinFarOffset) {
Vladimir Marko01b65522020-10-28 15:43:54 +00008930 base = RegisterFrom(maybe_temp);
Vladimir Marko248141f2018-08-10 10:40:07 +01008931 static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
8932 __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
8933 offset &= (kReferenceLoadMinFarOffset - 1u);
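    // Worked example, with a hypothetical kReferenceLoadMinFarOffset of 0x1000:
    // offset 0x1234 gives base = obj + 0x1000 and a residual offset of 0x234 for
    // the LDR emitted by the MemOperand overload called below.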
8934 }
8935 GenerateFieldLoadWithBakerReadBarrier(
8936 instruction, ref, obj, MemOperand(base, offset), needs_null_check);
8937}
8938
Vladimir Marko008e09f32018-08-06 15:42:43 +01008939void CodeGeneratorARMVIXL::GenerateArrayLoadWithBakerReadBarrier(Location ref,
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008940 vixl32::Register obj,
8941 uint32_t data_offset,
8942 Location index,
8943 Location temp,
8944 bool needs_null_check) {
8945 DCHECK(kEmitCompilerReadBarrier);
8946 DCHECK(kUseBakerReadBarrier);
8947
8948 static_assert(
8949 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
8950 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008951 ScaleFactor scale_factor = TIMES_4;
8952
Vladimir Marko008e09f32018-08-06 15:42:43 +01008953 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
8954 // Marking Register) to decide whether we need to enter the slow
8955 // path to mark the reference. Then, in the slow path, check the
8956 // gray bit in the lock word of the reference's holder (`obj`) to
8957 // decide whether to mark `ref` or not.
8958 //
8959 // We use shared thunks for the slow path; shared within the method
8960 // for JIT, across methods for AOT. That thunk checks the holder
8961 // and jumps to the entrypoint if needed. If the holder is not gray,
8962 // it creates a fake dependency and returns to the LDR instruction.
8963 //
8964 // lr = &gray_return_address;
8965 // if (mr) { // Thread::Current()->GetIsGcMarking()
8966 // goto array_thunk<base_reg>(lr)
8967 // }
8968 // not_gray_return_address:
8969 // // Original reference load. If the offset is too large to fit
8970 // // into LDR, we use an adjusted base register here.
8971 // HeapReference<mirror::Object> reference = data[index];
8972 // gray_return_address:
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008973
Vladimir Marko008e09f32018-08-06 15:42:43 +01008974 DCHECK(index.IsValid());
8975 vixl32::Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
8976 vixl32::Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
8977 vixl32::Register data_reg = RegisterFrom(temp, DataType::Type::kInt32); // Raw pointer.
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008978
Vladimir Marko008e09f32018-08-06 15:42:43 +01008979 UseScratchRegisterScope temps(GetVIXLAssembler());
8980 temps.Exclude(ip);
8981 uint32_t custom_data = EncodeBakerReadBarrierArrayData(data_reg.GetCode());
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008982
Vladimir Marko008e09f32018-08-06 15:42:43 +01008983 __ Add(data_reg, obj, Operand(data_offset));
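  // The LDR emitted below then addresses data_reg + (index << 2), i.e.
  // &data[index] for 4-byte heap references (scale_factor is TIMES_4).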
8984 {
Vladimir Markod887ed82018-08-14 13:52:12 +00008985 size_t narrow_instructions = /* CMP */ (mr.IsLow() ? 1u : 0u);
8986 size_t wide_instructions =
8987 /* ADR+CMP+BNE+LDR+unpoison? */ (kPoisonHeapReferences ? 5u : 4u) - narrow_instructions;
8988 size_t exact_size = wide_instructions * vixl32::k32BitT32InstructionSizeInBytes +
8989 narrow_instructions * vixl32::k16BitT32InstructionSizeInBytes;
8990 ExactAssemblyScope guard(GetVIXLAssembler(), exact_size);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008991 vixl32::Label return_address;
8992 EmitAdrCode adr(GetVIXLAssembler(), lr, &return_address);
8993 __ cmp(mr, Operand(0));
8994 EmitBakerReadBarrierBne(custom_data);
8995 ptrdiff_t old_offset = GetVIXLAssembler()->GetBuffer()->GetCursorOffset();
8996 __ ldr(ref_reg, MemOperand(data_reg, index_reg, vixl32::LSL, scale_factor));
8997 DCHECK(!needs_null_check); // The thunk cannot handle the null check.
8998 // Note: We need a Wide NEG for the unpoisoning.
8999 if (kPoisonHeapReferences) {
9000 __ rsb(EncodingSize(Wide), ref_reg, ref_reg, Operand(0));
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009001 }
Vladimir Markod887ed82018-08-14 13:52:12 +00009002 __ bind(&return_address);
Vladimir Marko008e09f32018-08-06 15:42:43 +01009003 DCHECK_EQ(old_offset - GetVIXLAssembler()->GetBuffer()->GetCursorOffset(),
9004 BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009005 }
Andra Danciua0130e82020-07-23 12:34:56 +00009006 MaybeGenerateMarkingRegisterCheck(/* code= */ 22, /* temp_loc= */ LocationFrom(ip));
Roland Levillain6070e882016-11-03 17:51:58 +00009007}
9008
Roland Levillain5daa4952017-07-03 17:23:56 +01009009void CodeGeneratorARMVIXL::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
9010 // The following condition is a compile-time one, so it does not have a run-time cost.
9011 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
9012 // The following condition is a run-time one; it is executed after the
9013 // previous compile-time test, to avoid penalizing non-debug builds.
9014 if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
9015 UseScratchRegisterScope temps(GetVIXLAssembler());
9016 vixl32::Register temp = temp_loc.IsValid() ? RegisterFrom(temp_loc) : temps.Acquire();
9017 GetAssembler()->GenerateMarkingRegisterCheck(temp,
9018 kMarkingRegisterCheckBreakCodeBaseCode + code);
9019 }
9020 }
9021}
9022
Vladimir Marko3d350a82020-11-18 14:14:27 +00009023SlowPathCodeARMVIXL* CodeGeneratorARMVIXL::AddReadBarrierSlowPath(HInstruction* instruction,
9024 Location out,
9025 Location ref,
9026 Location obj,
9027 uint32_t offset,
9028 Location index) {
9029 SlowPathCodeARMVIXL* slow_path = new (GetScopedAllocator())
9030 ReadBarrierForHeapReferenceSlowPathARMVIXL(instruction, out, ref, obj, offset, index);
9031 AddSlowPath(slow_path);
9032 return slow_path;
9033}
9034
Anton Kirilovedb2ac32016-11-30 15:14:10 +00009035void CodeGeneratorARMVIXL::GenerateReadBarrierSlow(HInstruction* instruction,
9036 Location out,
9037 Location ref,
9038 Location obj,
9039 uint32_t offset,
9040 Location index) {
9041 DCHECK(kEmitCompilerReadBarrier);
9042
9043 // Insert a slow path based read barrier *after* the reference load.
9044 //
9045 // If heap poisoning is enabled, the unpoisoning of the loaded
9046 // reference will be carried out by the runtime within the slow
9047 // path.
9048 //
9049 // Note that `ref` currently does not get unpoisoned (when heap
9050 // poisoning is enabled), which is alright as the `ref` argument is
9051 // not used by the artReadBarrierSlow entry point.
9052 //
9053 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko3d350a82020-11-18 14:14:27 +00009054 SlowPathCodeARMVIXL* slow_path =
9055 AddReadBarrierSlowPath(instruction, out, ref, obj, offset, index);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00009056
9057 __ B(slow_path->GetEntryLabel());
9058 __ Bind(slow_path->GetExitLabel());
9059}
9060
9061void CodeGeneratorARMVIXL::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
Artem Serov02d37832016-10-25 15:25:33 +01009062 Location out,
Anton Kirilovedb2ac32016-11-30 15:14:10 +00009063 Location ref,
9064 Location obj,
9065 uint32_t offset,
9066 Location index) {
Artem Serov02d37832016-10-25 15:25:33 +01009067 if (kEmitCompilerReadBarrier) {
Anton Kirilovedb2ac32016-11-30 15:14:10 +00009068 // Baker's read barriers shall be handled by the fast path
Roland Levillain9983e302017-07-14 14:34:22 +01009069 // (CodeGeneratorARMVIXL::GenerateReferenceLoadWithBakerReadBarrier).
Artem Serov02d37832016-10-25 15:25:33 +01009070 DCHECK(!kUseBakerReadBarrier);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00009071 // If heap poisoning is enabled, unpoisoning will be taken care of
9072 // by the runtime within the slow path.
9073 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Artem Serov02d37832016-10-25 15:25:33 +01009074 } else if (kPoisonHeapReferences) {
9075 GetAssembler()->UnpoisonHeapReference(RegisterFrom(out));
9076 }
9077}
9078
Anton Kirilovedb2ac32016-11-30 15:14:10 +00009079void CodeGeneratorARMVIXL::GenerateReadBarrierForRootSlow(HInstruction* instruction,
9080 Location out,
9081 Location root) {
9082 DCHECK(kEmitCompilerReadBarrier);
9083
9084 // Insert a slow path based read barrier *after* the GC root load.
9085 //
9086 // Note that GC roots are not affected by heap poisoning, so we do
9087 // not need to do anything special for this here.
9088 SlowPathCodeARMVIXL* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01009089 new (GetScopedAllocator()) ReadBarrierForRootSlowPathARMVIXL(instruction, out, root);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00009090 AddSlowPath(slow_path);
9091
9092 __ B(slow_path->GetEntryLabel());
9093 __ Bind(slow_path->GetExitLabel());
9094}
9095
Artem Serov02d37832016-10-25 15:25:33 +01009096// Check if the desired_dispatch_info is supported. If it is, return it,
9097// otherwise return a fall-back info that should be used instead.
9098HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARMVIXL::GetSupportedInvokeStaticOrDirectDispatch(
Artem Serovd4cc5b22016-11-04 11:19:09 +00009099 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Vladimir Marko86c87522020-05-11 16:55:55 +01009100 ArtMethod* method) {
Vladimir Markod3e9c622020-08-05 12:20:28 +01009101 if (method->IsIntrinsic() &&
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01009102 desired_dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative) {
Vladimir Markod3e9c622020-08-05 12:20:28 +01009103 // As a work-around for soft-float native ABI interfering with type checks, we are
9104 // inserting fake calls to Float.floatToRawIntBits() or Double.doubleToRawLongBits()
9105 // when a float or double argument is passed in core registers but we cannot do that
9106 // for actual intrinsic implementations that expect them in FP registers. Therefore
9107 // we do not use `kCallCriticalNative` for intrinsics with FP arguments; if they are
9108 // properly intrinsified, the dispatch type does not matter anyway.
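    // For example, a hypothetical native method with shorty "JFI" (returns long,
    // takes a float and an int) hits the check below at i == 1 and is dispatched
    // via CodePtrLocation::kCallArtMethod instead.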
Vladimir Marko86c87522020-05-11 16:55:55 +01009109 ScopedObjectAccess soa(Thread::Current());
9110 uint32_t shorty_len;
9111 const char* shorty = method->GetShorty(&shorty_len);
Vladimir Marko86c87522020-05-11 16:55:55 +01009112 for (uint32_t i = 1; i != shorty_len; ++i) {
Vladimir Marko86c87522020-05-11 16:55:55 +01009113 if (shorty[i] == 'D' || shorty[i] == 'F') {
9114 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01009115 dispatch_info.code_ptr_location = CodePtrLocation::kCallArtMethod;
Vladimir Marko86c87522020-05-11 16:55:55 +01009116 return dispatch_info;
9117 }
Vladimir Marko86c87522020-05-11 16:55:55 +01009118 }
9119 }
Nicolas Geoffraye807ff72017-01-23 09:03:12 +00009120 return desired_dispatch_info;
Artem Serov02d37832016-10-25 15:25:33 +01009121}
9122
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01009123
9124void CodeGeneratorARMVIXL::LoadMethod(MethodLoadKind load_kind, Location temp, HInvoke* invoke) {
9125 switch (load_kind) {
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01009126 case MethodLoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko44ca0752019-07-29 10:18:25 +01009127 DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01009128 PcRelativePatchInfo* labels = NewBootImageMethodPatch(invoke->GetResolvedMethodReference());
Vladimir Marko65979462017-05-19 17:25:12 +01009129 vixl32::Register temp_reg = RegisterFrom(temp);
9130 EmitMovwMovtPlaceholder(labels, temp_reg);
9131 break;
9132 }
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01009133 case MethodLoadKind::kBootImageRelRo: {
Vladimir Markoe47f60c2018-02-21 13:43:28 +00009134 uint32_t boot_image_offset = GetBootImageOffset(invoke);
Vladimir Markob066d432018-01-03 13:14:37 +00009135 PcRelativePatchInfo* labels = NewBootImageRelRoPatch(boot_image_offset);
9136 vixl32::Register temp_reg = RegisterFrom(temp);
9137 EmitMovwMovtPlaceholder(labels, temp_reg);
9138      GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, temp_reg, /* offset= */ 0);
9139 break;
9140 }
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01009141 case MethodLoadKind::kBssEntry: {
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01009142 PcRelativePatchInfo* labels = NewMethodBssEntryPatch(invoke->GetMethodReference());
Vladimir Marko0eb882b2017-05-15 13:39:18 +01009143 vixl32::Register temp_reg = RegisterFrom(temp);
9144 EmitMovwMovtPlaceholder(labels, temp_reg);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01009145 // All aligned loads are implicitly atomic consume operations on ARM.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01009146      GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, temp_reg, /* offset= */ 0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009147 break;
9148 }
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01009149 case MethodLoadKind::kJitDirectAddress: {
9150 __ Mov(RegisterFrom(temp), Operand::From(invoke->GetResolvedMethod()));
Vladimir Marko8e524ad2018-07-13 10:27:43 +01009151 break;
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01009152 }
9153 case MethodLoadKind::kRuntimeCall: {
9154 // Test situation, don't do anything.
9155 break;
9156 }
9157 default: {
9158 LOG(FATAL) << "Load kind should have already been handled " << load_kind;
9159 UNREACHABLE();
9160 }
9161 }
9162}
9163
9164void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
9165 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
9166 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
9167 switch (invoke->GetMethodLoadKind()) {
9168 case MethodLoadKind::kStringInit: {
9169 uint32_t offset =
9170 GetThreadOffset<kArmPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
9171 // temp = thread->string_init_entrypoint
9172 GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(temp), tr, offset);
9173 break;
9174 }
9175 case MethodLoadKind::kRecursive: {
9176 callee_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodIndex());
9177 break;
9178 }
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01009179 case MethodLoadKind::kRuntimeCall: {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01009180 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
9181 return; // No code pointer retrieval; the runtime performs the call directly.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009182 }
Vladimir Markoeb9eb002020-10-02 13:54:19 +01009183 case MethodLoadKind::kBootImageLinkTimePcRelative:
9184 // Note: Unlike arm64, x86 and x86-64, we do not avoid the materialization of method
9185 // pointer for kCallCriticalNative because it would not save us an instruction from
9186 // the current sequence MOVW+MOVT+ADD(pc)+LDR+BL. The ADD(pc) separates the patched
9187 // offset instructions MOVW+MOVT from the entrypoint load, so they cannot be fused.
9188 FALLTHROUGH_INTENDED;
Nicolas Geoffray8d34a182020-09-16 09:46:58 +01009189 default: {
9190 LoadMethod(invoke->GetMethodLoadKind(), temp, invoke);
9191 break;
9192 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009193 }
9194
Vladimir Marko86c87522020-05-11 16:55:55 +01009195 auto call_code_pointer_member = [&](MemberOffset offset) {
9196 // LR = callee_method->member;
9197 GetAssembler()->LoadFromOffset(kLoadWord, lr, RegisterFrom(callee_method), offset.Int32Value());
9198 {
9199 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
9200      // blx in T32 has only a 16-bit encoding, which is why a stricter check for the scope is used.
9201 ExactAssemblyScope aas(GetVIXLAssembler(),
9202 vixl32::k16BitT32InstructionSizeInBytes,
9203 CodeBufferCheckScope::kExactSize);
9204 // LR()
9205 __ blx(lr);
9206 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
9207 }
9208 };
Artem Serovd4cc5b22016-11-04 11:19:09 +00009209 switch (invoke->GetCodePtrLocation()) {
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01009210 case CodePtrLocation::kCallSelf:
Vladimir Markoe7197bf2017-06-02 17:00:23 +01009211 {
9212 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
9213 ExactAssemblyScope aas(GetVIXLAssembler(),
9214 vixl32::k32BitT32InstructionSizeInBytes,
9215 CodeBufferCheckScope::kMaximumSize);
9216 __ bl(GetFrameEntryLabel());
9217 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
9218 }
Artem Serovd4cc5b22016-11-04 11:19:09 +00009219 break;
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01009220 case CodePtrLocation::kCallCriticalNative: {
Vladimir Marko86c87522020-05-11 16:55:55 +01009221 size_t out_frame_size =
9222 PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorARMVIXL,
9223 kAapcsStackAlignment,
Vladimir Markodec78172020-06-19 15:31:23 +01009224 GetCriticalNativeDirectCallFrameSize>(invoke);
Vladimir Marko86c87522020-05-11 16:55:55 +01009225 call_code_pointer_member(ArtMethod::EntryPointFromJniOffset(kArmPointerSize));
9226 // Move the result when needed due to native and managed ABI mismatch.
9227 switch (invoke->GetType()) {
9228 case DataType::Type::kFloat32:
9229 __ Vmov(s0, r0);
9230 break;
9231 case DataType::Type::kFloat64:
9232 __ Vmov(d0, r0, r1);
9233 break;
9234 case DataType::Type::kBool:
9235 case DataType::Type::kInt8:
9236 case DataType::Type::kUint16:
9237 case DataType::Type::kInt16:
9238 case DataType::Type::kInt32:
9239 case DataType::Type::kInt64:
9240 case DataType::Type::kVoid:
9241 break;
9242 default:
9243 DCHECK(false) << invoke->GetType();
9244 break;
9245 }
9246 if (out_frame_size != 0u) {
Vladimir Markodec78172020-06-19 15:31:23 +01009247 DecreaseFrame(out_frame_size);
Vladimir Marko86c87522020-05-11 16:55:55 +01009248 }
9249 break;
9250 }
Nicolas Geoffray6d69b522020-09-23 14:47:28 +01009251 case CodePtrLocation::kCallArtMethod:
Vladimir Marko86c87522020-05-11 16:55:55 +01009252 call_code_pointer_member(ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize));
Artem Serovd4cc5b22016-11-04 11:19:09 +00009253 break;
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009254 }
9255
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009256 DCHECK(!IsLeafMethod());
9257}
9258
Vladimir Markoe7197bf2017-06-02 17:00:23 +01009259void CodeGeneratorARMVIXL::GenerateVirtualCall(
9260 HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009261 vixl32::Register temp = RegisterFrom(temp_location);
9262 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
9263 invoke->GetVTableIndex(), kArmPointerSize).Uint32Value();
9264
9265 // Use the calling convention instead of the location of the receiver, as
9266 // intrinsics may have put the receiver in a different register. In the intrinsics
9267 // slow path, the arguments have been moved to the right place, so here we are
9268 // guaranteed that the receiver is the first register of the calling convention.
9269 InvokeDexCallingConventionARMVIXL calling_convention;
9270 vixl32::Register receiver = calling_convention.GetRegisterAt(0);
9271 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Alexandre Rames374ddf32016-11-04 10:40:49 +00009272 {
9273 // Make sure the pc is recorded immediately after the `ldr` instruction.
Artem Serov0fb37192016-12-06 18:13:40 +00009274 ExactAssemblyScope aas(GetVIXLAssembler(),
9275 vixl32::kMaxInstructionSizeInBytes,
9276 CodeBufferCheckScope::kMaximumSize);
Alexandre Rames374ddf32016-11-04 10:40:49 +00009277 // /* HeapReference<Class> */ temp = receiver->klass_
9278 __ ldr(temp, MemOperand(receiver, class_offset));
9279 MaybeRecordImplicitNullCheck(invoke);
9280 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009281 // Instead of simply (possibly) unpoisoning `temp` here, we should
9282 // emit a read barrier for the previous class reference load.
9283 // However this is not required in practice, as this is an
9284 // intermediate/temporary reference and because the current
9285 // concurrent copying collector keeps the from-space memory
9286  // intact/accessible until the end of the marking phase (future
9287  // collectors may not provide this guarantee).
9288 GetAssembler()->MaybeUnpoisonHeapReference(temp);
9289
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00009290 // If we're compiling baseline, update the inline cache.
9291 MaybeGenerateInlineCacheCheck(invoke, temp);
9292
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009293 // temp = temp->GetMethodAt(method_offset);
9294 uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
9295 kArmPointerSize).Int32Value();
9296 GetAssembler()->LoadFromOffset(kLoadWord, temp, temp, method_offset);
9297 // LR = temp->GetEntryPoint();
9298 GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, entry_point);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01009299 {
9300 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
9301    // blx in T32 has only a 16-bit encoding, which is why a stricter check for the scope is used.
9302 ExactAssemblyScope aas(GetVIXLAssembler(),
9303 vixl32::k16BitT32InstructionSizeInBytes,
9304 CodeBufferCheckScope::kExactSize);
9305 // LR();
9306 __ blx(lr);
9307 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
9308 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009309}
9310
Vladimir Marko6fd16062018-06-26 11:02:04 +01009311CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageIntrinsicPatch(
9312 uint32_t intrinsic_data) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01009313 return NewPcRelativePatch(/* dex_file= */ nullptr, intrinsic_data, &boot_image_other_patches_);
Vladimir Marko6fd16062018-06-26 11:02:04 +01009314}
9315
Vladimir Markob066d432018-01-03 13:14:37 +00009316CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageRelRoPatch(
9317 uint32_t boot_image_offset) {
Andreas Gampe3db70682018-12-26 15:12:03 -08009318 return NewPcRelativePatch(/* dex_file= */ nullptr,
Vladimir Markob066d432018-01-03 13:14:37 +00009319 boot_image_offset,
Vladimir Marko2d06e022019-07-08 15:45:19 +01009320 &boot_image_other_patches_);
Vladimir Markob066d432018-01-03 13:14:37 +00009321}
9322
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009323CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageMethodPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01009324 MethodReference target_method) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009325 return NewPcRelativePatch(
9326 target_method.dex_file, target_method.index, &boot_image_method_patches_);
Artem Serovd4cc5b22016-11-04 11:19:09 +00009327}
9328
Vladimir Marko0eb882b2017-05-15 13:39:18 +01009329CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewMethodBssEntryPatch(
9330 MethodReference target_method) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009331 return NewPcRelativePatch(
9332 target_method.dex_file, target_method.index, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01009333}
9334
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009335CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageTypePatch(
Artem Serovd4cc5b22016-11-04 11:19:09 +00009336 const DexFile& dex_file, dex::TypeIndex type_index) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009337 return NewPcRelativePatch(&dex_file, type_index.index_, &boot_image_type_patches_);
Artem Serovd4cc5b22016-11-04 11:19:09 +00009338}
9339
Vladimir Marko1998cd02017-01-13 13:02:58 +00009340CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewTypeBssEntryPatch(
Vladimir Marko8f63f102020-09-28 12:10:28 +01009341 HLoadClass* load_class) {
9342 const DexFile& dex_file = load_class->GetDexFile();
9343 dex::TypeIndex type_index = load_class->GetTypeIndex();
9344 ArenaDeque<PcRelativePatchInfo>* patches = nullptr;
9345 switch (load_class->GetLoadKind()) {
9346 case HLoadClass::LoadKind::kBssEntry:
9347 patches = &type_bss_entry_patches_;
9348 break;
9349 case HLoadClass::LoadKind::kBssEntryPublic:
9350 patches = &public_type_bss_entry_patches_;
9351 break;
9352 case HLoadClass::LoadKind::kBssEntryPackage:
9353 patches = &package_type_bss_entry_patches_;
9354 break;
9355 default:
9356 LOG(FATAL) << "Unexpected load kind: " << load_class->GetLoadKind();
9357 UNREACHABLE();
9358 }
Vladimir Markobaade402020-09-30 14:45:39 +00009359 return NewPcRelativePatch(&dex_file, type_index.index_, patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00009360}
9361
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009362CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageStringPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01009363 const DexFile& dex_file, dex::StringIndex string_index) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009364 return NewPcRelativePatch(&dex_file, string_index.index_, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01009365}
9366
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01009367CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewStringBssEntryPatch(
9368 const DexFile& dex_file, dex::StringIndex string_index) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009369 return NewPcRelativePatch(&dex_file, string_index.index_, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01009370}
9371
Artem Serovd4cc5b22016-11-04 11:19:09 +00009372CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009373 const DexFile* dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00009374 patches->emplace_back(dex_file, offset_or_index);
9375 return &patches->back();
9376}
9377
Vladimir Markof6675082019-05-17 12:05:28 +01009378void CodeGeneratorARMVIXL::EmitEntrypointThunkCall(ThreadOffset32 entrypoint_offset) {
9379 DCHECK(!__ AllowMacroInstructions()); // In ExactAssemblyScope.
Vladimir Marko695348f2020-05-19 14:42:02 +01009380 DCHECK(!GetCompilerOptions().IsJitCompiler());
Vladimir Markof6675082019-05-17 12:05:28 +01009381 call_entrypoint_patches_.emplace_back(/*dex_file*/ nullptr, entrypoint_offset.Uint32Value());
9382 vixl::aarch32::Label* bl_label = &call_entrypoint_patches_.back().label;
9383 __ bind(bl_label);
9384 vixl32::Label placeholder_label;
9385 __ bl(&placeholder_label); // Placeholder, patched at link-time.
9386 __ bind(&placeholder_label);
9387}
9388
Vladimir Marko966b46f2018-08-03 10:20:19 +00009389void CodeGeneratorARMVIXL::EmitBakerReadBarrierBne(uint32_t custom_data) {
Vladimir Markod887ed82018-08-14 13:52:12 +00009390 DCHECK(!__ AllowMacroInstructions()); // In ExactAssemblyScope.
Vladimir Marko695348f2020-05-19 14:42:02 +01009391 if (GetCompilerOptions().IsJitCompiler()) {
Vladimir Marko966b46f2018-08-03 10:20:19 +00009392 auto it = jit_baker_read_barrier_slow_paths_.FindOrAdd(custom_data);
9393 vixl::aarch32::Label* slow_path_entry = &it->second.label;
9394 __ b(ne, EncodingSize(Wide), slow_path_entry);
9395 } else {
9396 baker_read_barrier_patches_.emplace_back(custom_data);
9397 vixl::aarch32::Label* patch_label = &baker_read_barrier_patches_.back().label;
9398 __ bind(patch_label);
9399 vixl32::Label placeholder_label;
9400 __ b(ne, EncodingSize(Wide), &placeholder_label); // Placeholder, patched at link-time.
9401 __ bind(&placeholder_label);
9402 }
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009403}
9404
Artem Serovc5fcb442016-12-02 19:19:58 +00009405VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateBootImageAddressLiteral(uint32_t address) {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01009406 return DeduplicateUint32Literal(address, &uint32_literals_);
Artem Serovc5fcb442016-12-02 19:19:58 +00009407}
9408
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00009409VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitStringLiteral(
9410 const DexFile& dex_file,
9411 dex::StringIndex string_index,
9412 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01009413 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Artem Serovc5fcb442016-12-02 19:19:58 +00009414 return jit_string_patches_.GetOrCreate(
9415 StringReference(&dex_file, string_index),
9416 [this]() {
Andreas Gampe3db70682018-12-26 15:12:03 -08009417 return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u);
Artem Serovc5fcb442016-12-02 19:19:58 +00009418 });
9419}
9420
9421VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitClassLiteral(const DexFile& dex_file,
9422 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00009423 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01009424 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Artem Serovc5fcb442016-12-02 19:19:58 +00009425 return jit_class_patches_.GetOrCreate(
9426 TypeReference(&dex_file, type_index),
9427 [this]() {
Andreas Gampe3db70682018-12-26 15:12:03 -08009428 return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u);
Artem Serovc5fcb442016-12-02 19:19:58 +00009429 });
9430}
9431
Vladimir Marko6fd16062018-06-26 11:02:04 +01009432void CodeGeneratorARMVIXL::LoadBootImageAddress(vixl32::Register reg,
9433 uint32_t boot_image_reference) {
9434 if (GetCompilerOptions().IsBootImage()) {
9435 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
9436 NewBootImageIntrinsicPatch(boot_image_reference);
9437 EmitMovwMovtPlaceholder(labels, reg);
Vladimir Markoa2da9b92018-10-10 14:21:55 +01009438 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Marko6fd16062018-06-26 11:02:04 +01009439 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
9440 NewBootImageRelRoPatch(boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01009441 EmitMovwMovtPlaceholder(labels, reg);
Andreas Gampe3db70682018-12-26 15:12:03 -08009442 __ Ldr(reg, MemOperand(reg, /* offset= */ 0));
Vladimir Markoeebb8212018-06-05 14:57:24 +01009443 } else {
Vladimir Marko695348f2020-05-19 14:42:02 +01009444 DCHECK(GetCompilerOptions().IsJitCompiler());
Vladimir Markoeebb8212018-06-05 14:57:24 +01009445 gc::Heap* heap = Runtime::Current()->GetHeap();
9446 DCHECK(!heap->GetBootImageSpaces().empty());
9447 uintptr_t address =
Vladimir Marko6fd16062018-06-26 11:02:04 +01009448 reinterpret_cast<uintptr_t>(heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01009449 __ Ldr(reg, DeduplicateBootImageAddressLiteral(dchecked_integral_cast<uint32_t>(address)));
9450 }
9451}
9452
Vladimir Marko7968cae2021-01-19 12:02:35 +00009453void CodeGeneratorARMVIXL::LoadTypeForBootImageIntrinsic(vixl::aarch32::Register reg,
9454 TypeReference target_type) {
9455 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
9456 DCHECK(GetCompilerOptions().IsBootImage());
9457 PcRelativePatchInfo* labels =
9458 NewBootImageTypePatch(*target_type.dex_file, target_type.TypeIndex());
9459 EmitMovwMovtPlaceholder(labels, reg);
9460}
9461
Vladimir Markode91ca92020-10-27 13:41:40 +00009462void CodeGeneratorARMVIXL::LoadIntrinsicDeclaringClass(vixl32::Register reg, HInvoke* invoke) {
9463 DCHECK_NE(invoke->GetIntrinsic(), Intrinsics::kNone);
Vladimir Marko6fd16062018-06-26 11:02:04 +01009464 if (GetCompilerOptions().IsBootImage()) {
Nicolas Geoffraye6c0f2a2020-09-07 08:30:52 +01009465 MethodReference target_method = invoke->GetResolvedMethodReference();
Vladimir Marko6fd16062018-06-26 11:02:04 +01009466 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
Vladimir Marko7968cae2021-01-19 12:02:35 +00009467 LoadTypeForBootImageIntrinsic(reg, TypeReference(target_method.dex_file, type_idx));
Vladimir Marko6fd16062018-06-26 11:02:04 +01009468 } else {
Vladimir Markode91ca92020-10-27 13:41:40 +00009469 uint32_t boot_image_offset = GetBootImageOffsetOfIntrinsicDeclaringClass(invoke);
9470 LoadBootImageAddress(reg, boot_image_offset);
Vladimir Marko6fd16062018-06-26 11:02:04 +01009471 }
Vladimir Marko6fd16062018-06-26 11:02:04 +01009472}
9473
Vladimir Marko7968cae2021-01-19 12:02:35 +00009474void CodeGeneratorARMVIXL::LoadClassRootForIntrinsic(vixl::aarch32::Register reg,
9475 ClassRoot class_root) {
9476 if (GetCompilerOptions().IsBootImage()) {
9477 ScopedObjectAccess soa(Thread::Current());
9478 ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
9479 TypeReference target_type(&klass->GetDexFile(), klass->GetDexTypeIndex());
9480 LoadTypeForBootImageIntrinsic(reg, target_type);
9481 } else {
9482 uint32_t boot_image_offset = GetBootImageOffset(class_root);
9483 LoadBootImageAddress(reg, boot_image_offset);
9484 }
9485}
9486
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01009487template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Artem Serovd4cc5b22016-11-04 11:19:09 +00009488inline void CodeGeneratorARMVIXL::EmitPcRelativeLinkerPatches(
9489 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01009490 ArenaVector<linker::LinkerPatch>* linker_patches) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00009491 for (const PcRelativePatchInfo& info : infos) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009492 const DexFile* dex_file = info.target_dex_file;
Artem Serovd4cc5b22016-11-04 11:19:09 +00009493 size_t offset_or_index = info.offset_or_index;
9494 DCHECK(info.add_pc_label.IsBound());
9495 uint32_t add_pc_offset = dchecked_integral_cast<uint32_t>(info.add_pc_label.GetLocation());
9496 // Add MOVW patch.
9497 DCHECK(info.movw_label.IsBound());
9498 uint32_t movw_offset = dchecked_integral_cast<uint32_t>(info.movw_label.GetLocation());
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009499 linker_patches->push_back(Factory(movw_offset, dex_file, add_pc_offset, offset_or_index));
Artem Serovd4cc5b22016-11-04 11:19:09 +00009500 // Add MOVT patch.
9501 DCHECK(info.movt_label.IsBound());
9502 uint32_t movt_offset = dchecked_integral_cast<uint32_t>(info.movt_label.GetLocation());
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009503 linker_patches->push_back(Factory(movt_offset, dex_file, add_pc_offset, offset_or_index));
Artem Serovd4cc5b22016-11-04 11:19:09 +00009504 }
9505}
9506
Vladimir Marko6fd16062018-06-26 11:02:04 +01009507template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
9508linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
9509 const DexFile* target_dex_file,
9510 uint32_t pc_insn_offset,
9511 uint32_t boot_image_offset) {
9512 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
9513 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00009514}
9515
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01009516void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00009517 DCHECK(linker_patches->empty());
9518 size_t size =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009519 /* MOVW+MOVT for each entry */ 2u * boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01009520 /* MOVW+MOVT for each entry */ 2u * method_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009521 /* MOVW+MOVT for each entry */ 2u * boot_image_type_patches_.size() +
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009522 /* MOVW+MOVT for each entry */ 2u * type_bss_entry_patches_.size() +
Vladimir Marko8f63f102020-09-28 12:10:28 +01009523 /* MOVW+MOVT for each entry */ 2u * public_type_bss_entry_patches_.size() +
9524 /* MOVW+MOVT for each entry */ 2u * package_type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009525 /* MOVW+MOVT for each entry */ 2u * boot_image_string_patches_.size() +
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01009526 /* MOVW+MOVT for each entry */ 2u * string_bss_entry_patches_.size() +
Vladimir Marko2d06e022019-07-08 15:45:19 +01009527 /* MOVW+MOVT for each entry */ 2u * boot_image_other_patches_.size() +
Vladimir Markof6675082019-05-17 12:05:28 +01009528 call_entrypoint_patches_.size() +
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009529 baker_read_barrier_patches_.size();
Artem Serovd4cc5b22016-11-04 11:19:09 +00009530 linker_patches->reserve(size);
Vladimir Marko44ca0752019-07-29 10:18:25 +01009531 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01009532 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009533 boot_image_method_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01009534 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009535 boot_image_type_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01009536 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00009537 boot_image_string_patches_, linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01009538 } else {
Vladimir Marko2d06e022019-07-08 15:45:19 +01009539 DCHECK(boot_image_method_patches_.empty());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00009540 DCHECK(boot_image_type_patches_.empty());
9541 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko2d06e022019-07-08 15:45:19 +01009542 }
9543 if (GetCompilerOptions().IsBootImage()) {
9544 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
9545 boot_image_other_patches_, linker_patches);
9546 } else {
9547 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
9548 boot_image_other_patches_, linker_patches);
Artem Serovd4cc5b22016-11-04 11:19:09 +00009549 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01009550 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
9551 method_bss_entry_patches_, linker_patches);
9552 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
9553 type_bss_entry_patches_, linker_patches);
Vladimir Marko8f63f102020-09-28 12:10:28 +01009554 EmitPcRelativeLinkerPatches<linker::LinkerPatch::PublicTypeBssEntryPatch>(
9555 public_type_bss_entry_patches_, linker_patches);
9556 EmitPcRelativeLinkerPatches<linker::LinkerPatch::PackageTypeBssEntryPatch>(
9557 package_type_bss_entry_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01009558 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
9559 string_bss_entry_patches_, linker_patches);
Vladimir Markof6675082019-05-17 12:05:28 +01009560 for (const PatchInfo<vixl32::Label>& info : call_entrypoint_patches_) {
9561 DCHECK(info.target_dex_file == nullptr);
9562 linker_patches->push_back(linker::LinkerPatch::CallEntrypointPatch(
9563 info.label.GetLocation(), info.offset_or_index));
9564 }
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009565 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01009566 linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
9567 info.label.GetLocation(), info.custom_data));
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009568 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00009569 DCHECK_EQ(size, linker_patches->size());
Artem Serovc5fcb442016-12-02 19:19:58 +00009570}
9571
Vladimir Markoca1e0382018-04-11 09:58:41 +00009572bool CodeGeneratorARMVIXL::NeedsThunkCode(const linker::LinkerPatch& patch) const {
Vladimir Markof6675082019-05-17 12:05:28 +01009573 return patch.GetType() == linker::LinkerPatch::Type::kCallEntrypoint ||
9574 patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
Vladimir Markoca1e0382018-04-11 09:58:41 +00009575 patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
9576}
9577
9578void CodeGeneratorARMVIXL::EmitThunkCode(const linker::LinkerPatch& patch,
9579 /*out*/ ArenaVector<uint8_t>* code,
9580 /*out*/ std::string* debug_name) {
9581 arm::ArmVIXLAssembler assembler(GetGraph()->GetAllocator());
9582 switch (patch.GetType()) {
Vladimir Markof6675082019-05-17 12:05:28 +01009583 case linker::LinkerPatch::Type::kCallRelative: {
Vladimir Markoca1e0382018-04-11 09:58:41 +00009584 // The thunk just uses the entry point in the ArtMethod. This works even for calls
9585 // to the generic JNI and interpreter trampolines.
Vladimir Markof6675082019-05-17 12:05:28 +01009586 MemberOffset offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize);
9587 assembler.LoadFromOffset(arm::kLoadWord, vixl32::pc, vixl32::r0, offset.Int32Value());
Vladimir Markoca1e0382018-04-11 09:58:41 +00009588 assembler.GetVIXLAssembler()->Bkpt(0);
9589 if (GetCompilerOptions().GenerateAnyDebugInfo()) {
9590 *debug_name = "MethodCallThunk";
9591 }
9592 break;
Vladimir Markof6675082019-05-17 12:05:28 +01009593 }
9594 case linker::LinkerPatch::Type::kCallEntrypoint: {
9595 assembler.LoadFromOffset(arm::kLoadWord, vixl32::pc, tr, patch.EntrypointOffset());
9596 assembler.GetVIXLAssembler()->Bkpt(0);
9597 if (GetCompilerOptions().GenerateAnyDebugInfo()) {
9598 *debug_name = "EntrypointCallThunk_" + std::to_string(patch.EntrypointOffset());
9599 }
9600 break;
9601 }
9602 case linker::LinkerPatch::Type::kBakerReadBarrierBranch: {
Vladimir Markoca1e0382018-04-11 09:58:41 +00009603 DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
9604 CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
9605 break;
Vladimir Markof6675082019-05-17 12:05:28 +01009606 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00009607 default:
9608 LOG(FATAL) << "Unexpected patch type " << patch.GetType();
9609 UNREACHABLE();
9610 }
9611
9612 // Ensure we emit the literal pool if any.
9613 assembler.FinalizeCode();
9614 code->resize(assembler.CodeSize());
9615 MemoryRegion code_region(code->data(), code->size());
9616 assembler.FinalizeInstructions(code_region);
9617}
9618
Artem Serovc5fcb442016-12-02 19:19:58 +00009619VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateUint32Literal(
9620 uint32_t value,
9621 Uint32ToLiteralMap* map) {
9622 return map->GetOrCreate(
9623 value,
9624 [this, value]() {
Andreas Gampe3db70682018-12-26 15:12:03 -08009625 return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ value);
Artem Serovc5fcb442016-12-02 19:19:58 +00009626 });
9627}
9628
Artem Serov2bbc9532016-10-21 11:51:50 +01009629void LocationsBuilderARMVIXL::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
9630 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01009631 new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Serov2bbc9532016-10-21 11:51:50 +01009632 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
9633 Location::RequiresRegister());
9634 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
9635 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
9636 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
9637}
9638
9639void InstructionCodeGeneratorARMVIXL::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
9640 vixl32::Register res = OutputRegister(instr);
9641 vixl32::Register accumulator =
9642 InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
9643 vixl32::Register mul_left =
9644 InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
9645 vixl32::Register mul_right =
9646 InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
9647
9648 if (instr->GetOpKind() == HInstruction::kAdd) {
9649 __ Mla(res, mul_left, mul_right, accumulator);
9650 } else {
9651 __ Mls(res, mul_left, mul_right, accumulator);
9652 }
9653}
9654
Artem Serov551b28f2016-10-18 19:11:30 +01009655void LocationsBuilderARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
9656 // Nothing to do, this should be removed during prepare for register allocator.
9657 LOG(FATAL) << "Unreachable";
9658}
9659
9660void InstructionCodeGeneratorARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
9661 // Nothing to do, this should be removed during prepare for register allocator.
9662 LOG(FATAL) << "Unreachable";
9663}
9664
9665// Simple implementation of packed switch - generate cascaded compare/jumps.
9666void LocationsBuilderARMVIXL::VisitPackedSwitch(HPackedSwitch* switch_instr) {
9667 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01009668 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Artem Serov551b28f2016-10-18 19:11:30 +01009669 locations->SetInAt(0, Location::RequiresRegister());
9670 if (switch_instr->GetNumEntries() > kPackedSwitchCompareJumpThreshold &&
9671 codegen_->GetAssembler()->GetVIXLAssembler()->IsUsingT32()) {
9672 locations->AddTemp(Location::RequiresRegister()); // We need a temp for the table base.
9673 if (switch_instr->GetStartValue() != 0) {
9674 locations->AddTemp(Location::RequiresRegister()); // We need a temp for the bias.
9675 }
9676 }
9677}
9678
9679// TODO(VIXL): Investigate and reach parity with the old arm codegen.
9680void InstructionCodeGeneratorARMVIXL::VisitPackedSwitch(HPackedSwitch* switch_instr) {
9681 int32_t lower_bound = switch_instr->GetStartValue();
9682 uint32_t num_entries = switch_instr->GetNumEntries();
9683 LocationSummary* locations = switch_instr->GetLocations();
9684 vixl32::Register value_reg = InputRegisterAt(switch_instr, 0);
9685 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9686
9687 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
9688 !codegen_->GetAssembler()->GetVIXLAssembler()->IsUsingT32()) {
9689 // Create a series of compare/jumps.
Anton Kirilovedb2ac32016-11-30 15:14:10 +00009690 UseScratchRegisterScope temps(GetVIXLAssembler());
Artem Serov551b28f2016-10-18 19:11:30 +01009691 vixl32::Register temp_reg = temps.Acquire();
9692    // Note: It is fine for the AddConstantSetFlags() below to use the IP register to temporarily
9693    // store the immediate, because IP is used as the destination register. For the other
9694    // AddConstantSetFlags() and GenerateCompareWithImmediate() calls, the immediate values are
9695    // constant and can be encoded in the instruction without using the IP register.
9696 __ Adds(temp_reg, value_reg, -lower_bound);
9697
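    // Illustrative walk-through, assuming lower_bound == 0 and five entries:
    // the B(eq) below handles value 0, the loop then peels pairs {1, 2} and
    // {3, 4} with one ADDS each, and any remaining value reaches the default
    // block.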
9698 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
9699 // Jump to successors[0] if value == lower_bound.
9700 __ B(eq, codegen_->GetLabelOf(successors[0]));
9701 int32_t last_index = 0;
9702 for (; num_entries - last_index > 2; last_index += 2) {
9703 __ Adds(temp_reg, temp_reg, -2);
9704 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
9705 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
9706 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
9707 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
9708 }
9709 if (num_entries - last_index == 2) {
9710 // The last missing case_value.
9711 __ Cmp(temp_reg, 1);
9712 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
9713 }
9714
9715 // And the default for any other value.
9716 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
9717 __ B(codegen_->GetLabelOf(default_block));
9718 }
9719 } else {
9720 // Create a table lookup.
9721 vixl32::Register table_base = RegisterFrom(locations->GetTemp(0));
9722
9723 JumpTableARMVIXL* jump_table = codegen_->CreateJumpTable(switch_instr);
9724
9725 // Remove the bias.
9726 vixl32::Register key_reg;
9727 if (lower_bound != 0) {
9728 key_reg = RegisterFrom(locations->GetTemp(1));
9729 __ Sub(key_reg, value_reg, lower_bound);
9730 } else {
9731 key_reg = value_reg;
9732 }
9733
9734 // Check whether the value is in the table, jump to default block if not.
9735 __ Cmp(key_reg, num_entries - 1);
9736 __ B(hi, codegen_->GetLabelOf(default_block));
9737
Anton Kirilovedb2ac32016-11-30 15:14:10 +00009738 UseScratchRegisterScope temps(GetVIXLAssembler());
Artem Serov551b28f2016-10-18 19:11:30 +01009739 vixl32::Register jump_offset = temps.Acquire();
9740
9741 // Load jump offset from the table.
Scott Wakeling86e9d262017-01-18 15:59:24 +00009742 {
9743 const size_t jump_size = switch_instr->GetNumEntries() * sizeof(int32_t);
9744 ExactAssemblyScope aas(GetVIXLAssembler(),
9745 (vixl32::kMaxInstructionSizeInBytes * 4) + jump_size,
9746 CodeBufferCheckScope::kMaximumSize);
9747 __ adr(table_base, jump_table->GetTableStartLabel());
9748 __ ldr(jump_offset, MemOperand(table_base, key_reg, vixl32::LSL, 2));
Artem Serov551b28f2016-10-18 19:11:30 +01009749
Scott Wakeling86e9d262017-01-18 15:59:24 +00009750      // Jump to the target block by branching to table_base (PC-relative) + offset.
9751 vixl32::Register target_address = table_base;
9752 __ add(target_address, table_base, jump_offset);
9753 __ bx(target_address);
Artem Serov09a940d2016-11-11 16:15:11 +00009754
Scott Wakeling86e9d262017-01-18 15:59:24 +00009755 jump_table->EmitTable(codegen_);
9756 }
Artem Serov551b28f2016-10-18 19:11:30 +01009757 }
9758}
9759
Artem Serov02d37832016-10-25 15:25:33 +01009760// Copy the result of a call into the given target.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01009761void CodeGeneratorARMVIXL::MoveFromReturnRegister(Location trg, DataType::Type type) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01009762 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01009763 DCHECK_EQ(type, DataType::Type::kVoid);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01009764 return;
9765 }
9766
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01009767 DCHECK_NE(type, DataType::Type::kVoid);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01009768
Artem Serovd4cc5b22016-11-04 11:19:09 +00009769 Location return_loc = InvokeDexCallingConventionVisitorARMVIXL().GetReturnLocation(type);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01009770 if (return_loc.Equals(trg)) {
9771 return;
9772 }
9773
Vladimir Markoac3fcff2020-11-17 12:17:58 +00009774 // Let the parallel move resolver take care of all of this.
9775 HParallelMove parallel_move(GetGraph()->GetAllocator());
9776 parallel_move.AddMove(return_loc, trg, type, nullptr);
9777 GetMoveResolver()->EmitNativeCode(&parallel_move);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01009778}
Scott Wakelingfe885462016-09-22 10:24:38 +01009779
xueliang.zhong8d2c4592016-11-23 17:05:25 +00009780void LocationsBuilderARMVIXL::VisitClassTableGet(HClassTableGet* instruction) {
9781 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01009782 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
xueliang.zhong8d2c4592016-11-23 17:05:25 +00009783 locations->SetInAt(0, Location::RequiresRegister());
9784 locations->SetOut(Location::RequiresRegister());
Artem Serov551b28f2016-10-18 19:11:30 +01009785}
9786
xueliang.zhong8d2c4592016-11-23 17:05:25 +00009787void InstructionCodeGeneratorARMVIXL::VisitClassTableGet(HClassTableGet* instruction) {
9788 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
9789 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
9790 instruction->GetIndex(), kArmPointerSize).SizeValue();
9791 GetAssembler()->LoadFromOffset(kLoadWord,
9792 OutputRegister(instruction),
9793 InputRegisterAt(instruction, 0),
9794 method_offset);
9795 } else {
9796 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
9797 instruction->GetIndex(), kArmPointerSize));
9798 GetAssembler()->LoadFromOffset(kLoadWord,
9799 OutputRegister(instruction),
9800 InputRegisterAt(instruction, 0),
9801 mirror::Class::ImtPtrOffset(kArmPointerSize).Uint32Value());
9802 GetAssembler()->LoadFromOffset(kLoadWord,
9803 OutputRegister(instruction),
9804 OutputRegister(instruction),
9805 method_offset);
9806 }
Artem Serov551b28f2016-10-18 19:11:30 +01009807}

static void PatchJitRootUse(uint8_t* code,
                            const uint8_t* roots_data,
                            VIXLUInt32Literal* literal,
                            uint64_t index_in_table) {
  DCHECK(literal->IsBound());
  uint32_t literal_offset = literal->GetLocation();
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint8_t* data = code + literal_offset;
  reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
}
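
// In other words, the 32-bit literal at `literal_offset` is rewritten to hold
// the absolute address of slot `index_in_table` in the JIT root table; the
// generated code then reads the GC root through that address at runtime.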

void CodeGeneratorARMVIXL::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const StringReference& string_reference = entry.first;
    VIXLUInt32Literal* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
  for (const auto& entry : jit_class_patches_) {
    const TypeReference& type_reference = entry.first;
    VIXLUInt32Literal* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
}

void CodeGeneratorARMVIXL::EmitMovwMovtPlaceholder(
    CodeGeneratorARMVIXL::PcRelativePatchInfo* labels,
    vixl32::Register out) {
  ExactAssemblyScope aas(GetVIXLAssembler(),
                         3 * vixl32::kMaxInstructionSizeInBytes,
                         CodeBufferCheckScope::kMaximumSize);
  // TODO(VIXL): Think about using mov instead of movw.
  __ bind(&labels->movw_label);
  __ movw(out, /* operand= */ 0u);
  __ bind(&labels->movt_label);
  __ movt(out, /* operand= */ 0u);
  __ bind(&labels->add_pc_label);
  __ add(out, out, pc);
}

#undef __
#undef QUICK_ENTRY_POINT
#undef TODO_VIXL32

#define __ assembler.GetVIXLAssembler()->

static void EmitGrayCheckAndFastPath(ArmVIXLAssembler& assembler,
                                     vixl32::Register base_reg,
                                     vixl32::MemOperand& lock_word,
                                     vixl32::Label* slow_path,
                                     int32_t raw_ldr_offset,
                                     vixl32::Label* throw_npe = nullptr) {
  // Load the lock word containing the rb_state.
  __ Ldr(ip, lock_word);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  __ Tst(ip, Operand(LockWord::kReadBarrierStateMaskShifted));
  __ B(ne, slow_path, /* is_far_target= */ false);
  // To throw NPE, we return to the fast path; the artificial dependence below does not matter.
  if (throw_npe != nullptr) {
    __ Bind(throw_npe);
  }
  __ Add(lr, lr, raw_ldr_offset);
  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  __ Add(base_reg, base_reg, Operand(ip, LSR, 32));
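  // (On AArch32, a shift of LSR #32 always produces zero, so the ADD above
  // leaves `base_reg` unchanged while still creating a data dependency on `ip`.)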
  __ Bx(lr);  // And return back to the function.
  // Note: The fake dependency is unnecessary for the slow path.
}

// Load the read barrier introspection entrypoint into the marking register and return it.
static vixl32::Register LoadReadBarrierMarkIntrospectionEntrypoint(ArmVIXLAssembler& assembler) {
  // The register where the read barrier introspection entrypoint is loaded
  // is the marking register. We clobber it here and the entrypoint restores it to 1.
  vixl32::Register entrypoint = mr;
  // entrypoint = Thread::Current()->pReadBarrierMarkReg12, i.e. pReadBarrierMarkIntrospection.
  DCHECK_EQ(ip.GetCode(), 12u);
  const int32_t entry_point_offset =
      Thread::ReadBarrierMarkEntryPointsOffset<kArmPointerSize>(ip.GetCode());
  __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
  return entrypoint;
}

void CodeGeneratorARMVIXL::CompileBakerReadBarrierThunk(ArmVIXLAssembler& assembler,
                                                        uint32_t encoded_data,
                                                        /*out*/ std::string* debug_name) {
  BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
  switch (kind) {
    case BakerReadBarrierKind::kField: {
      vixl32::Register base_reg(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      vixl32::Register holder_reg(BakerReadBarrierSecondRegField::Decode(encoded_data));
      CheckValidReg(holder_reg.GetCode());
      BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip);
      // In the case of a field load, if `base_reg` differs from `holder_reg`, the offset was
      // too large and we must have emitted (during the construction of the HIR graph, see
      // `art::HInstructionBuilder::BuildInstanceFieldAccess`) and preserved (see
      // `art::PrepareForRegisterAllocation::VisitNullCheck`) an explicit null check before
      // the load. Otherwise, for implicit null checks, we need to null-check the holder as we
      // do not necessarily do that check before going to the thunk.
      vixl32::Label throw_npe_label;
      vixl32::Label* throw_npe = nullptr;
      if (GetCompilerOptions().GetImplicitNullChecks() && holder_reg.Is(base_reg)) {
        throw_npe = &throw_npe_label;
        __ CompareAndBranchIfZero(holder_reg, throw_npe, /* is_far_target= */ false);
      }
      // Check if the holder is gray and, if not, add a fake dependency to the base register
      // and return to the LDR instruction to load the reference. Otherwise, use introspection
      // to load the reference and call the entrypoint that performs further checks on the
      // reference and marks it if needed.
      vixl32::Label slow_path;
      MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
      const int32_t raw_ldr_offset = (width == BakerReadBarrierWidth::kWide)
          ? BAKER_MARK_INTROSPECTION_FIELD_LDR_WIDE_OFFSET
          : BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_OFFSET;
      EmitGrayCheckAndFastPath(
          assembler, base_reg, lock_word, &slow_path, raw_ldr_offset, throw_npe);
      __ Bind(&slow_path);
      const int32_t ldr_offset = /* Thumb state adjustment (LR contains Thumb state). */ -1 +
                                 raw_ldr_offset;
      vixl32::Register ep_reg = LoadReadBarrierMarkIntrospectionEntrypoint(assembler);
      if (width == BakerReadBarrierWidth::kWide) {
        MemOperand ldr_half_address(lr, ldr_offset + 2);
        __ Ldrh(ip, ldr_half_address);         // Load the LDR immediate half-word with "Rt | imm12".
        __ Ubfx(ip, ip, 0, 12);                // Extract the offset imm12.
        __ Ldr(ip, MemOperand(base_reg, ip));  // Load the reference.
      } else {
        MemOperand ldr_address(lr, ldr_offset);
        __ Ldrh(ip, ldr_address);              // Load the LDR immediate, encoding T1.
        __ Add(ep_reg,                         // Adjust the entrypoint address to the entrypoint
               ep_reg,                         // for narrow LDR.
               Operand(BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_ENTRYPOINT_OFFSET));
        __ Ubfx(ip, ip, 6, 5);                 // Extract the imm5, i.e. offset / 4.
        __ Ldr(ip, MemOperand(base_reg, ip, LSL, 2));  // Load the reference.
      }
      // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
      __ Bx(ep_reg);  // Jump to the entrypoint.
      break;
    }
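
    // Note on the slow path above: it re-reads the original LDR instruction in
    // the compiled code (reachable through LR) to recover the field offset, so
    // one thunk per (base, holder, width) combination serves all field offsets;
    // e.g. for a wide LDR the imm12 offset sits in the second half-word of the
    // 32-bit Thumb2 encoding, hence the read at `ldr_offset + 2`.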
    case BakerReadBarrierKind::kArray: {
      vixl32::Register base_reg(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      DCHECK(BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide);
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip);
      vixl32::Label slow_path;
      int32_t data_offset =
          mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
      MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
      DCHECK_LT(lock_word.GetOffsetImmediate(), 0);
      const int32_t raw_ldr_offset = BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET;
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, raw_ldr_offset);
      __ Bind(&slow_path);
      const int32_t ldr_offset = /* Thumb state adjustment (LR contains Thumb state). */ -1 +
                                 raw_ldr_offset;
      MemOperand ldr_address(lr, ldr_offset + 2);
      __ Ldrb(ip, ldr_address);   // Load the LDR (register) byte with "00 | imm2 | Rm",
                                  // i.e. Rm+32 because the scale in imm2 is 2.
      vixl32::Register ep_reg = LoadReadBarrierMarkIntrospectionEntrypoint(assembler);
      __ Bfi(ep_reg, ip, 3, 6);   // Insert ip to the entrypoint address to create
                                  // a switch case target based on the index register.
      __ Mov(ip, base_reg);       // Move the base register to ip.
      __ Bx(ep_reg);              // Jump to the entrypoint's array switch case.
      break;
    }
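
    // Note on the BFI above: it folds the decoded index register straight into
    // the entrypoint address (6 bits inserted at bit 3), so the introspection
    // entrypoint is presumably laid out as a table of cases spaced 8 bytes
    // apart, selected by the index register of the original LDR.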
    case BakerReadBarrierKind::kGcRoot:
    case BakerReadBarrierKind::kIntrinsicCas: {
      // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
      // and it does not have a forwarding address), call the correct introspection entrypoint;
      // otherwise return the reference (or the extracted forwarding address).
      // There is no gray bit check for GC roots.
      vixl32::Register root_reg(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(root_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip);
      vixl32::Label return_label, not_marked, forwarding_address;
      __ CompareAndBranchIfZero(root_reg, &return_label, /* is_far_target= */ false);
      MemOperand lock_word(root_reg, mirror::Object::MonitorOffset().Int32Value());
      __ Ldr(ip, lock_word);
      __ Tst(ip, LockWord::kMarkBitStateMaskShifted);
      __ B(eq, &not_marked);
      __ Bind(&return_label);
      __ Bx(lr);
      __ Bind(&not_marked);
      static_assert(LockWord::kStateShift == 30 && LockWord::kStateForwardingAddress == 3,
                    "To use 'CMP ip, #modified-immediate; BHS', we need the lock word state in "
                    "the highest bits and the 'forwarding address' state to have all bits set");
      __ Cmp(ip, Operand(0xc0000000));
      __ B(hs, &forwarding_address);
      vixl32::Register ep_reg = LoadReadBarrierMarkIntrospectionEntrypoint(assembler);
      // Adjust the art_quick_read_barrier_mark_introspection address
      // in kBakerCcEntrypointRegister to one of
      //   art_quick_read_barrier_mark_introspection_{gc_roots_{wide,narrow},intrinsic_cas}.
      if (kind == BakerReadBarrierKind::kIntrinsicCas) {
        DCHECK(width == BakerReadBarrierWidth::kWide);
        DCHECK(!root_reg.IsLow());
      }
      int32_t entrypoint_offset =
          (kind == BakerReadBarrierKind::kGcRoot)
              ? (width == BakerReadBarrierWidth::kWide)
                  ? BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_WIDE_ENTRYPOINT_OFFSET
                  : BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_NARROW_ENTRYPOINT_OFFSET
              : BAKER_MARK_INTROSPECTION_INTRINSIC_CAS_ENTRYPOINT_OFFSET;
      __ Add(ep_reg, ep_reg, Operand(entrypoint_offset));
      __ Mov(ip, root_reg);
      __ Bx(ep_reg);
      __ Bind(&forwarding_address);
      __ Lsl(root_reg, ip, LockWord::kForwardingAddressShift);
      __ Bx(lr);
      break;
    }
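
    // Forwarding-address note for the case above: when the lock word state is
    // "forwarding address" (both top bits set, hence the unsigned comparison
    // against 0xc0000000), the remaining bits hold the address shifted right by
    // kForwardingAddressShift, which the final LSL reconstructs before returning.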
    default:
      LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
      UNREACHABLE();
  }

  // For JIT, the slow path is considered part of the compiled method,
  // so JIT should pass null as `debug_name`.
  DCHECK(!GetCompilerOptions().IsJitCompiler() || debug_name == nullptr);
  if (debug_name != nullptr && GetCompilerOptions().GenerateAnyDebugInfo()) {
    std::ostringstream oss;
    oss << "BakerReadBarrierThunk";
    switch (kind) {
      case BakerReadBarrierKind::kField:
        oss << "Field";
        if (BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide) {
          oss << "Wide";
        }
        oss << "_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kArray:
        oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        DCHECK(BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide);
        break;
      case BakerReadBarrierKind::kGcRoot:
        oss << "GcRoot";
        if (BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide) {
          oss << "Wide";
        }
        oss << "_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
      case BakerReadBarrierKind::kIntrinsicCas:
        oss << "IntrinsicCas_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        DCHECK(BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide);
        break;
    }
    *debug_name = oss.str();
  }
}

#undef __

}  // namespace arm
}  // namespace art