/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm_vixl.h"

#include "arch/arm/asm_support_arm.h"
#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "common_arm.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_arm_vixl.h"
#include "linker/linker_patch.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/arm/assembler_arm_vixl.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {
namespace arm {

namespace vixl32 = vixl::aarch32;
using namespace vixl32;  // NOLINT(build/namespaces)

using helpers::DRegisterFrom;
using helpers::DWARFReg;
using helpers::HighRegisterFrom;
using helpers::InputDRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegister;
using helpers::InputRegisterAt;
using helpers::InputSRegisterAt;
using helpers::InputVRegister;
using helpers::InputVRegisterAt;
using helpers::Int32ConstantFrom;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::LowRegisterFrom;
using helpers::LowSRegisterFrom;
using helpers::OperandFrom;
using helpers::OutputRegister;
using helpers::OutputSRegister;
using helpers::OutputVRegister;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::Uint64ConstantFrom;

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

using RegisterList = vixl32::RegisterList;

static bool ExpectedPairLayout(Location location) {
  // We expect this for both core and FPU register pairs.
  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
}
// Use a local definition to prevent copying mistakes.
static constexpr size_t kArmWordSize = static_cast<size_t>(kArmPointerSize);
static constexpr size_t kArmBitsPerWord = kArmWordSize * kBitsPerByte;
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

// Reference loads (except object array loads) use LDR Rt, [Rn, #offset], which can handle
// offsets < 4KiB. For offsets >= 4KiB, the load must be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks, we need to split
// the offset explicitly.
constexpr uint32_t kReferenceLoadMinFarOffset = 4 * KB;

// Using a base helps identify when we hit Marking Register check breakpoints.
constexpr int kMarkingRegisterCheckBreakCodeBaseCode = 0x10;

#ifdef __
#error "ARM Codegen VIXL macro-assembler macro already defined."
#endif

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, x).Int32Value()

// Marker for code that is yet to be, and must be, implemented.
#define TODO_VIXL32(level) LOG(level) << __PRETTY_FUNCTION__ << " unimplemented "

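// Whether a narrow (16-bit Thumb) LDR encoding can be used for this load: both registers must
// be low registers and the offset must be small (the bound used here is conservative for word
// and halfword loads).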
static inline bool CanEmitNarrowLdr(vixl32::Register rt, vixl32::Register rn, uint32_t offset) {
  return rt.IsLow() && rn.IsLow() && offset < 32u;
}

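// Helper that emits a wide ADR within an ExactAssemblyScope and, once the target label is bound,
// patches the encoded result to set the Thumb mode bit (so the computed address can be used as a
// return address).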
class EmitAdrCode {
 public:
  EmitAdrCode(ArmVIXLMacroAssembler* assembler, vixl32::Register rd, vixl32::Label* label)
      : assembler_(assembler), rd_(rd), label_(label) {
    DCHECK(!assembler->AllowMacroInstructions());  // In ExactAssemblyScope.
    adr_location_ = assembler->GetCursorOffset();
    assembler->adr(EncodingSize(Wide), rd, label);
  }

  ~EmitAdrCode() {
    DCHECK(label_->IsBound());
    // The ADR emitted by the assembler does not set the Thumb mode bit we need.
    // TODO: Maybe extend VIXL to allow ADR for return address?
    uint8_t* raw_adr = assembler_->GetBuffer()->GetOffsetAddress<uint8_t*>(adr_location_);
    // Expecting ADR encoding T3 with `(offset & 1) == 0`.
    DCHECK_EQ(raw_adr[1] & 0xfbu, 0xf2u);          // Check bits 24-31, except 26.
    DCHECK_EQ(raw_adr[0] & 0xffu, 0x0fu);          // Check bits 16-23.
    DCHECK_EQ(raw_adr[3] & 0x8fu, rd_.GetCode());  // Check bits 8-11 and 15.
    DCHECK_EQ(raw_adr[2] & 0x01u, 0x00u);          // Check bit 0, i.e. the `offset & 1`.
    // Add the Thumb mode bit.
    raw_adr[2] |= 0x01u;
  }

 private:
  ArmVIXLMacroAssembler* const assembler_;
  vixl32::Register rd_;
  vixl32::Label* const label_;
  int32_t adr_location_;
};

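// Builds the custom caller-save set used by slow paths that follow the SaveEverything calling
// convention with one core register argument and a reference result: only the first argument
// register (which also receives the result) needs to be recorded.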
static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
  // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
  // that the kPrimNot result register is the same as the first argument register.
  return caller_saves;
}

// SaveLiveRegisters and RestoreLiveRegisters from SlowPathCodeARM operate on sets of S registers:
// for each live D register, they treat the two corresponding S registers as live.
//
// The two following functions (SaveContiguousSRegisterList, RestoreContiguousSRegisterList) build,
// from a list of contiguous S registers, a list of contiguous D registers (handling the first/last
// S register corner cases) and save/restore this new list treating them as D registers. This has
// the benefit of:
// - decreasing code size;
// - avoiding hazards on Cortex-A57, when a pair of S registers for an actual live D register is
//   restored and then used in regular non-slow-path code as a D register.
//
// For the following example (v means the S register is live):
//   D names: |    D0   |    D1   |    D2   |    D3   | ...
//   S names: | S0 | S1 | S2 | S3 | S4 | S5 | S6 | S7 | ...
//   Live?    |    |  v |  v |  v |  v |  v |  v |    | ...
//
// S1 and S6 will be saved/restored independently; the D register list (D1, D2) will be
// processed as D registers.
//
// TODO(VIXL): All this code should be unnecessary once the VIXL AArch32 backend provides helpers
// for lists of floating-point registers.
static size_t SaveContiguousSRegisterList(size_t first,
                                          size_t last,
                                          CodeGenerator* codegen,
                                          size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool save_last = false;
  if (last % 2 == 0) {
    save_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;

    if (number_of_d_regs == 1) {
      __ Vstr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, Operand::From(stack_offset));
      }
      __ Vstm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (save_last) {
    __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

static size_t RestoreContiguousSRegisterList(size_t first,
                                             size_t last,
                                             CodeGenerator* codegen,
                                             size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vldr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vldr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool restore_last = false;
  if (last % 2 == 0) {
    restore_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;
    if (number_of_d_regs == 1) {
      __ Vldr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, Operand::From(stack_offset));
      }
      __ Vldm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (restore_last) {
    __ Vldr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

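// Returns the load instruction variant (width and signedness) matching the given data type.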
static LoadOperandType GetLoadOperandType(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
      return kLoadWord;
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      return kLoadUnsignedByte;
    case DataType::Type::kInt8:
      return kLoadSignedByte;
    case DataType::Type::kUint16:
      return kLoadUnsignedHalfword;
    case DataType::Type::kInt16:
      return kLoadSignedHalfword;
    case DataType::Type::kInt32:
      return kLoadWord;
    case DataType::Type::kInt64:
      return kLoadWordPair;
    case DataType::Type::kFloat32:
      return kLoadSWord;
    case DataType::Type::kFloat64:
      return kLoadDWord;
    default:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
}

static StoreOperandType GetStoreOperandType(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
      return kStoreWord;
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      return kStoreByte;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      return kStoreHalfword;
    case DataType::Type::kInt32:
      return kStoreWord;
    case DataType::Type::kInt64:
      return kStoreWordPair;
    case DataType::Type::kFloat32:
      return kStoreSWord;
    case DataType::Type::kFloat64:
      return kStoreDWord;
    default:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
}

void SlowPathCodeARMVIXL::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false);
  orig_offset = stack_offset;
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  stack_offset = orig_offset;
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = SaveContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

void SlowPathCodeARMVIXL::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    stack_offset += kArmWordSize;
  }

  // TODO(VIXL): Check the coherency of stack_offset after this with a test.
  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false);
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = RestoreContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

class NullCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit NullCheckSlowPathARMVIXL(HNullCheck* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "NullCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARMVIXL);
};

class DivZeroCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DivZeroCheckSlowPathARMVIXL(HDivZeroCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "DivZeroCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARMVIXL);
};

class SuspendCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  SuspendCheckSlowPathARMVIXL(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARMVIXL(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm_codegen->GetLabelOf(successor_));
    }
  }

  vixl32::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathARMVIXL"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl32::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARMVIXL);
};

class BoundsCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit BoundsCheckSlowPathARMVIXL(HBoundsCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARMVIXL);
};

class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  LoadClassSlowPathARMVIXL(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeARMVIXL(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), arm_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ Mov(calling_convention.GetRegisterAt(0), type_index.index_);
      arm_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      arm_codegen->Move32(LocationFrom(calling_convention.GetRegisterAt(0)), source);
    }
    if (must_do_clinit) {
      arm_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathARMVIXL"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARMVIXL);
};

class LoadStringSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit LoadStringSlowPathARMVIXL(HLoadString* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0), string_index.index_);
    arm_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARMVIXL);
};

class TypeCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  TypeCheckSlowPathARMVIXL(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARMVIXL(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConventionARMVIXL calling_convention;

    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(kQuickCheckInstanceOf,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathARMVIXL"; }

  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARMVIXL);
};

class DeoptimizationSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DeoptimizationSlowPathARMVIXL(HDeoptimize* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));

    arm_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARMVIXL);
};

class ArraySetSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit ArraySetSlowPathARMVIXL(HInstruction* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    arm_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARMVIXL);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  ReadBarrierForHeapReferenceSlowPathARMVIXL(HInstruction* instruction,
                                             Location out,
                                             Location ref,
                                             Location obj,
                                             uint32_t offset,
                                             Location index)
      : SlowPathCodeARMVIXL(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    vixl32::Register reg_out = RegisterFrom(out_);
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.GetCode()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        vixl32::Register index_reg = RegisterFrom(index_);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg.GetCode()));
        if (codegen->IsCoreCalleeSaveRegister(index_reg.GetCode())) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::arm::ArmVIXLMacroAssembler::Lsl and
          // art::arm::ArmVIXLMacroAssembler::Add below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          vixl32::Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg, index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ Mov(calling_convention.GetRegisterAt(2), offset_);
    }
    arm_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm_codegen->Move32(out_, LocationFrom(r0));

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathARMVIXL";
  }

 private:
  vixl32::Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    uint32_t ref = RegisterFrom(ref_).GetCode();
    uint32_t obj = RegisterFrom(obj_).GetCode();
    for (uint32_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return vixl32::Register(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARMVIXL);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  ReadBarrierForRootSlowPathARMVIXL(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARMVIXL(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    vixl32::Register reg_out = RegisterFrom(out_);
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.GetCode()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    arm_codegen->Move32(LocationFrom(calling_convention.GetRegisterAt(0)), root_);
    arm_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm_codegen->Move32(out_, LocationFrom(r0));

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathARMVIXL"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARMVIXL);
};

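// Maps an IfCondition to the corresponding ARM condition code.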
inline vixl32::Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition.
inline vixl32::Condition ARMUnsignedCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    // Signed to unsigned.
    case kCondLT: return lo;
    case kCondLE: return ls;
    case kCondGT: return hi;
    case kCondGE: return hs;
    // Unsigned remain unchanged.
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline vixl32::Condition ARMFPCondition(IfCondition cond, bool gt_bias) {
  // The ARM condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table A8-1 of the ARMv7 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

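// Maps a HDataProcWithShifterOp shift kind to the corresponding VIXL shift type.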
inline ShiftType ShiftFromOpKind(HDataProcWithShifterOp::OpKind op_kind) {
  switch (op_kind) {
    case HDataProcWithShifterOp::kASR: return ShiftType::ASR;
    case HDataProcWithShifterOp::kLSL: return ShiftType::LSL;
    case HDataProcWithShifterOp::kLSR: return ShiftType::LSR;
    default:
      LOG(FATAL) << "Unexpected op kind " << op_kind;
      UNREACHABLE();
  }
}

void CodeGeneratorARMVIXL::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << vixl32::Register(reg);
}

void CodeGeneratorARMVIXL::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << vixl32::SRegister(reg);
}

const ArmInstructionSetFeatures& CodeGeneratorARMVIXL::GetInstructionSetFeatures() const {
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsArmInstructionSetFeatures();
}

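// Builds a bit mask with one bit set for each S register in the given (contiguous) list.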
static uint32_t ComputeSRegisterListMask(const SRegisterList& regs) {
  uint32_t mask = 0;
  for (uint32_t i = regs.GetFirstSRegister().GetCode();
       i <= regs.GetLastSRegister().GetCode();
       ++i) {
    mask |= (1 << i);
  }
  return mask;
}

// Saves the register in the stack. Returns the size taken on stack.
size_t CodeGeneratorARMVIXL::SaveCoreRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                              uint32_t reg_id ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
  UNREACHABLE();
}

// Restores the register from the stack. Returns the size taken on stack.
size_t CodeGeneratorARMVIXL::RestoreCoreRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                                 uint32_t reg_id ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
  UNREACHABLE();
}

size_t CodeGeneratorARMVIXL::SaveFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                                       uint32_t reg_id ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
  UNREACHABLE();
}

size_t CodeGeneratorARMVIXL::RestoreFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                                          uint32_t reg_id ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
  UNREACHABLE();
}

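// Emits a single 32-bit data-processing instruction; a zero immediate second operand is folded
// into a plain move (or a move of zero for AND).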
static void GenerateDataProcInstruction(HInstruction::InstructionKind kind,
                                        vixl32::Register out,
                                        vixl32::Register first,
                                        const Operand& second,
                                        CodeGeneratorARMVIXL* codegen) {
  if (second.IsImmediate() && second.GetImmediate() == 0) {
    const Operand in = kind == HInstruction::kAnd
        ? Operand(0)
        : Operand(first);

    __ Mov(out, in);
  } else {
    switch (kind) {
      case HInstruction::kAdd:
        __ Add(out, first, second);
        break;
      case HInstruction::kAnd:
        __ And(out, first, second);
        break;
      case HInstruction::kOr:
        __ Orr(out, first, second);
        break;
      case HInstruction::kSub:
        __ Sub(out, first, second);
        break;
      case HInstruction::kXor:
        __ Eor(out, first, second);
        break;
      default:
        LOG(FATAL) << "Unexpected instruction kind: " << kind;
        UNREACHABLE();
    }
  }
}

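// Emits a 64-bit data-processing operation as two 32-bit operations on the low and high register
// halves, propagating the carry/borrow for additions and subtractions.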
static void GenerateDataProc(HInstruction::InstructionKind kind,
                             const Location& out,
                             const Location& first,
                             const Operand& second_lo,
                             const Operand& second_hi,
                             CodeGeneratorARMVIXL* codegen) {
  const vixl32::Register first_hi = HighRegisterFrom(first);
  const vixl32::Register first_lo = LowRegisterFrom(first);
  const vixl32::Register out_hi = HighRegisterFrom(out);
  const vixl32::Register out_lo = LowRegisterFrom(out);

  if (kind == HInstruction::kAdd) {
    __ Adds(out_lo, first_lo, second_lo);
    __ Adc(out_hi, first_hi, second_hi);
  } else if (kind == HInstruction::kSub) {
    __ Subs(out_lo, first_lo, second_lo);
    __ Sbc(out_hi, first_hi, second_hi);
  } else {
    GenerateDataProcInstruction(kind, out_lo, first_lo, second_lo, codegen);
    GenerateDataProcInstruction(kind, out_hi, first_hi, second_hi, codegen);
  }
}

static Operand GetShifterOperand(vixl32::Register rm, ShiftType shift, uint32_t shift_imm) {
  return shift_imm == 0 ? Operand(rm) : Operand(rm, shift, shift_imm);
}

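// Emits a 64-bit data-processing operation whose second operand is a 64-bit value shifted by a
// constant amount, distinguishing shifts of 32 or more (where whole registers move across the
// low/high halves) from shifts of less than 32 (where bits spill between the halves).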
static void GenerateLongDataProc(HDataProcWithShifterOp* instruction,
                                 CodeGeneratorARMVIXL* codegen) {
  DCHECK_EQ(instruction->GetType(), DataType::Type::kInt64);
  DCHECK(HDataProcWithShifterOp::IsShiftOp(instruction->GetOpKind()));

  const LocationSummary* const locations = instruction->GetLocations();
  const uint32_t shift_value = instruction->GetShiftAmount();
  const HInstruction::InstructionKind kind = instruction->GetInstrKind();
  const Location first = locations->InAt(0);
  const Location second = locations->InAt(1);
  const Location out = locations->Out();
  const vixl32::Register first_hi = HighRegisterFrom(first);
  const vixl32::Register first_lo = LowRegisterFrom(first);
  const vixl32::Register out_hi = HighRegisterFrom(out);
  const vixl32::Register out_lo = LowRegisterFrom(out);
  const vixl32::Register second_hi = HighRegisterFrom(second);
  const vixl32::Register second_lo = LowRegisterFrom(second);
  const ShiftType shift = ShiftFromOpKind(instruction->GetOpKind());

  if (shift_value >= 32) {
    if (shift == ShiftType::LSL) {
      GenerateDataProcInstruction(kind,
                                  out_hi,
                                  first_hi,
                                  Operand(second_lo, ShiftType::LSL, shift_value - 32),
                                  codegen);
      GenerateDataProcInstruction(kind, out_lo, first_lo, 0, codegen);
    } else if (shift == ShiftType::ASR) {
      GenerateDataProc(kind,
                       out,
                       first,
                       GetShifterOperand(second_hi, ShiftType::ASR, shift_value - 32),
                       Operand(second_hi, ShiftType::ASR, 31),
                       codegen);
    } else {
      DCHECK_EQ(shift, ShiftType::LSR);
      GenerateDataProc(kind,
                       out,
                       first,
                       GetShifterOperand(second_hi, ShiftType::LSR, shift_value - 32),
                       0,
                       codegen);
    }
  } else {
    DCHECK_GT(shift_value, 1U);
    DCHECK_LT(shift_value, 32U);

    UseScratchRegisterScope temps(codegen->GetVIXLAssembler());

    if (shift == ShiftType::LSL) {
      // We are not doing this for HInstruction::kAdd because the output will require
      // Location::kOutputOverlap; not applicable to other cases.
      if (kind == HInstruction::kOr || kind == HInstruction::kXor) {
        GenerateDataProcInstruction(kind,
                                    out_hi,
                                    first_hi,
                                    Operand(second_hi, ShiftType::LSL, shift_value),
                                    codegen);
        GenerateDataProcInstruction(kind,
                                    out_hi,
                                    out_hi,
                                    Operand(second_lo, ShiftType::LSR, 32 - shift_value),
                                    codegen);
        GenerateDataProcInstruction(kind,
                                    out_lo,
                                    first_lo,
                                    Operand(second_lo, ShiftType::LSL, shift_value),
                                    codegen);
      } else {
        const vixl32::Register temp = temps.Acquire();

        __ Lsl(temp, second_hi, shift_value);
        __ Orr(temp, temp, Operand(second_lo, ShiftType::LSR, 32 - shift_value));
        GenerateDataProc(kind,
                         out,
                         first,
                         Operand(second_lo, ShiftType::LSL, shift_value),
                         temp,
                         codegen);
      }
    } else {
      DCHECK(shift == ShiftType::ASR || shift == ShiftType::LSR);

      // We are not doing this for HInstruction::kAdd because the output will require
      // Location::kOutputOverlap; not applicable to other cases.
      if (kind == HInstruction::kOr || kind == HInstruction::kXor) {
        GenerateDataProcInstruction(kind,
                                    out_lo,
                                    first_lo,
                                    Operand(second_lo, ShiftType::LSR, shift_value),
                                    codegen);
        GenerateDataProcInstruction(kind,
                                    out_lo,
                                    out_lo,
                                    Operand(second_hi, ShiftType::LSL, 32 - shift_value),
                                    codegen);
        GenerateDataProcInstruction(kind,
                                    out_hi,
                                    first_hi,
                                    Operand(second_hi, shift, shift_value),
                                    codegen);
      } else {
        const vixl32::Register temp = temps.Acquire();

        __ Lsr(temp, second_lo, shift_value);
        __ Orr(temp, temp, Operand(second_hi, ShiftType::LSL, 32 - shift_value));
        GenerateDataProc(kind,
                         out,
                         first,
                         temp,
                         Operand(second_hi, shift, shift_value),
                         codegen);
      }
    }
  }
}

Donghui Bai426b49c2016-11-08 14:55:38 +08001241static void GenerateVcmp(HInstruction* instruction, CodeGeneratorARMVIXL* codegen) {
1242 const Location rhs_loc = instruction->GetLocations()->InAt(1);
1243 if (rhs_loc.IsConstant()) {
1244 // 0.0 is the only immediate that can be encoded directly in
1245 // a VCMP instruction.
1246 //
1247 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
1248 // specify that in a floating-point comparison, positive zero
1249 // and negative zero are considered equal, so we can use the
1250 // literal 0.0 for both cases here.
1251 //
1252    // Note however that some methods (Float.equals, Float.compare,
1253    // Float.compareTo, Double.equals, Double.compare,
1254 // Double.compareTo, Math.max, Math.min, StrictMath.max,
1255 // StrictMath.min) consider 0.0 to be (strictly) greater than
1256 // -0.0. So if we ever translate calls to these methods into a
1257 // HCompare instruction, we must handle the -0.0 case with
1258 // care here.
1259 DCHECK(rhs_loc.GetConstant()->IsArithmeticZero());
1260
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001261 const DataType::Type type = instruction->InputAt(0)->GetType();
Donghui Bai426b49c2016-11-08 14:55:38 +08001262
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001263 if (type == DataType::Type::kFloat32) {
Donghui Bai426b49c2016-11-08 14:55:38 +08001264 __ Vcmp(F32, InputSRegisterAt(instruction, 0), 0.0);
1265 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001266 DCHECK_EQ(type, DataType::Type::kFloat64);
Donghui Bai426b49c2016-11-08 14:55:38 +08001267 __ Vcmp(F64, InputDRegisterAt(instruction, 0), 0.0);
1268 }
1269 } else {
1270 __ Vcmp(InputVRegisterAt(instruction, 0), InputVRegisterAt(instruction, 1));
1271 }
1272}
1273
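// Rewrites comparisons against the constants 1 and -1 as comparisons against 0 where the
// condition allows it (e.g. unsigned `x < 1` becomes `x == 0`, signed `x > -1` becomes
// `x >= 0`), so that the cheaper zero-test sequences below can be used.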
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001274static int64_t AdjustConstantForCondition(int64_t value,
1275 IfCondition* condition,
1276 IfCondition* opposite) {
1277 if (value == 1) {
1278 if (*condition == kCondB) {
1279 value = 0;
1280 *condition = kCondEQ;
1281 *opposite = kCondNE;
1282 } else if (*condition == kCondAE) {
1283 value = 0;
1284 *condition = kCondNE;
1285 *opposite = kCondEQ;
1286 }
1287 } else if (value == -1) {
1288 if (*condition == kCondGT) {
1289 value = 0;
1290 *condition = kCondGE;
1291 *opposite = kCondLT;
1292 } else if (*condition == kCondLE) {
1293 value = 0;
1294 *condition = kCondLT;
1295 *opposite = kCondGE;
1296 }
1297 }
1298
1299 return value;
1300}
1301
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001302static std::pair<vixl32::Condition, vixl32::Condition> GenerateLongTestConstant(
1303 HCondition* condition,
1304 bool invert,
1305 CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001306 DCHECK_EQ(condition->GetLeft()->GetType(), DataType::Type::kInt64);
Donghui Bai426b49c2016-11-08 14:55:38 +08001307
1308 const LocationSummary* const locations = condition->GetLocations();
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001309 IfCondition cond = condition->GetCondition();
1310 IfCondition opposite = condition->GetOppositeCondition();
1311
1312 if (invert) {
1313 std::swap(cond, opposite);
1314 }
1315
1316 std::pair<vixl32::Condition, vixl32::Condition> ret(eq, ne);
Donghui Bai426b49c2016-11-08 14:55:38 +08001317 const Location left = locations->InAt(0);
1318 const Location right = locations->InAt(1);
1319
1320 DCHECK(right.IsConstant());
1321
1322 const vixl32::Register left_high = HighRegisterFrom(left);
1323 const vixl32::Register left_low = LowRegisterFrom(left);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001324 int64_t value = AdjustConstantForCondition(Int64ConstantFrom(right), &cond, &opposite);
1325 UseScratchRegisterScope temps(codegen->GetVIXLAssembler());
1326
1327 // Comparisons against 0 are common enough to deserve special attention.
1328 if (value == 0) {
1329 switch (cond) {
1330 case kCondNE:
1331 // x > 0 iff x != 0 when the comparison is unsigned.
1332 case kCondA:
1333 ret = std::make_pair(ne, eq);
1334 FALLTHROUGH_INTENDED;
1335 case kCondEQ:
1336 // x <= 0 iff x == 0 when the comparison is unsigned.
1337 case kCondBE:
1338 __ Orrs(temps.Acquire(), left_low, left_high);
1339 return ret;
1340 case kCondLT:
1341 case kCondGE:
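        // For signed comparisons against 0, only the sign of the high word matters.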
1342 __ Cmp(left_high, 0);
1343 return std::make_pair(ARMCondition(cond), ARMCondition(opposite));
1344 // Trivially true or false.
1345 case kCondB:
1346 ret = std::make_pair(ne, eq);
1347 FALLTHROUGH_INTENDED;
1348 case kCondAE:
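        // Comparing a register with itself sets Z (and C), so the condition pair chosen above
        // makes kCondAE always true and kCondB always false.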
1349 __ Cmp(left_low, left_low);
1350 return ret;
1351 default:
1352 break;
1353 }
1354 }
Donghui Bai426b49c2016-11-08 14:55:38 +08001355
1356 switch (cond) {
1357 case kCondEQ:
1358 case kCondNE:
1359 case kCondB:
1360 case kCondBE:
1361 case kCondA:
1362 case kCondAE: {
Anton Kirilov23b752b2017-07-20 14:40:44 +01001363 const uint32_t value_low = Low32Bits(value);
1364 Operand operand_low(value_low);
1365
Donghui Bai426b49c2016-11-08 14:55:38 +08001366 __ Cmp(left_high, High32Bits(value));
1367
Anton Kirilov23b752b2017-07-20 14:40:44 +01001368 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
1369 // we must ensure that the operands corresponding to the least significant
1370 // halves of the inputs fit into a 16-bit CMP encoding.
1371 if (!left_low.IsLow() || !IsUint<8>(value_low)) {
1372 operand_low = Operand(temps.Acquire());
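        // The flags set by the CMP above are still needed, so this MOV must leave them
        // untouched.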
1373 __ Mov(LeaveFlags, operand_low.GetBaseRegister(), value_low);
1374 }
1375
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001376 // We use the scope because of the IT block that follows.
Donghui Bai426b49c2016-11-08 14:55:38 +08001377 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1378 2 * vixl32::k16BitT32InstructionSizeInBytes,
1379 CodeBufferCheckScope::kExactSize);
1380
1381 __ it(eq);
Anton Kirilov23b752b2017-07-20 14:40:44 +01001382 __ cmp(eq, left_low, operand_low);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001383 ret = std::make_pair(ARMUnsignedCondition(cond), ARMUnsignedCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001384 break;
1385 }
1386 case kCondLE:
1387 case kCondGT:
1388 // Trivially true or false.
1389 if (value == std::numeric_limits<int64_t>::max()) {
1390 __ Cmp(left_low, left_low);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001391 ret = cond == kCondLE ? std::make_pair(eq, ne) : std::make_pair(ne, eq);
Donghui Bai426b49c2016-11-08 14:55:38 +08001392 break;
1393 }
1394
1395 if (cond == kCondLE) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001396 DCHECK_EQ(opposite, kCondGT);
Donghui Bai426b49c2016-11-08 14:55:38 +08001397 cond = kCondLT;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001398 opposite = kCondGE;
Donghui Bai426b49c2016-11-08 14:55:38 +08001399 } else {
1400 DCHECK_EQ(cond, kCondGT);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001401 DCHECK_EQ(opposite, kCondLE);
Donghui Bai426b49c2016-11-08 14:55:38 +08001402 cond = kCondGE;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001403 opposite = kCondLT;
Donghui Bai426b49c2016-11-08 14:55:38 +08001404 }
1405
1406 value++;
1407 FALLTHROUGH_INTENDED;
1408 case kCondGE:
1409 case kCondLT: {
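      // 64-bit signed comparison: subtract the low words to set the carry, then subtract the
      // high words with borrow (SBCS) into a scratch register; N and V then reflect the full
      // 64-bit comparison, which is all that LT/GE need.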
Donghui Bai426b49c2016-11-08 14:55:38 +08001410 __ Cmp(left_low, Low32Bits(value));
1411 __ Sbcs(temps.Acquire(), left_high, High32Bits(value));
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001412 ret = std::make_pair(ARMCondition(cond), ARMCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001413 break;
1414 }
1415 default:
1416 LOG(FATAL) << "Unreachable";
1417 UNREACHABLE();
1418 }
1419
1420 return ret;
1421}
1422
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001423static std::pair<vixl32::Condition, vixl32::Condition> GenerateLongTest(
1424 HCondition* condition,
1425 bool invert,
1426 CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001427 DCHECK_EQ(condition->GetLeft()->GetType(), DataType::Type::kInt64);
Donghui Bai426b49c2016-11-08 14:55:38 +08001428
1429 const LocationSummary* const locations = condition->GetLocations();
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001430 IfCondition cond = condition->GetCondition();
1431 IfCondition opposite = condition->GetOppositeCondition();
1432
1433 if (invert) {
1434 std::swap(cond, opposite);
1435 }
1436
1437 std::pair<vixl32::Condition, vixl32::Condition> ret(eq, ne);
Donghui Bai426b49c2016-11-08 14:55:38 +08001438 Location left = locations->InAt(0);
1439 Location right = locations->InAt(1);
1440
1441 DCHECK(right.IsRegisterPair());
1442
1443 switch (cond) {
1444 case kCondEQ:
1445 case kCondNE:
1446 case kCondB:
1447 case kCondBE:
1448 case kCondA:
1449 case kCondAE: {
1450 __ Cmp(HighRegisterFrom(left), HighRegisterFrom(right));
1451
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001452 // We use the scope because of the IT block that follows.
Donghui Bai426b49c2016-11-08 14:55:38 +08001453 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1454 2 * vixl32::k16BitT32InstructionSizeInBytes,
1455 CodeBufferCheckScope::kExactSize);
1456
1457 __ it(eq);
1458 __ cmp(eq, LowRegisterFrom(left), LowRegisterFrom(right));
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001459 ret = std::make_pair(ARMUnsignedCondition(cond), ARMUnsignedCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001460 break;
1461 }
1462 case kCondLE:
1463 case kCondGT:
1464 if (cond == kCondLE) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001465 DCHECK_EQ(opposite, kCondGT);
Donghui Bai426b49c2016-11-08 14:55:38 +08001466 cond = kCondGE;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001467 opposite = kCondLT;
Donghui Bai426b49c2016-11-08 14:55:38 +08001468 } else {
1469 DCHECK_EQ(cond, kCondGT);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001470 DCHECK_EQ(opposite, kCondLE);
Donghui Bai426b49c2016-11-08 14:55:38 +08001471 cond = kCondLT;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001472 opposite = kCondGE;
Donghui Bai426b49c2016-11-08 14:55:38 +08001473 }
1474
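      // `a <= b` is equivalent to `b >= a` (and `a > b` to `b < a`), so swap the operands and
      // fall through to the GE/LT sequence.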
1475 std::swap(left, right);
1476 FALLTHROUGH_INTENDED;
1477 case kCondGE:
1478 case kCondLT: {
1479 UseScratchRegisterScope temps(codegen->GetVIXLAssembler());
1480
1481 __ Cmp(LowRegisterFrom(left), LowRegisterFrom(right));
1482 __ Sbcs(temps.Acquire(), HighRegisterFrom(left), HighRegisterFrom(right));
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001483 ret = std::make_pair(ARMCondition(cond), ARMCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001484 break;
1485 }
1486 default:
1487 LOG(FATAL) << "Unreachable";
1488 UNREACHABLE();
1489 }
1490
1491 return ret;
1492}
1493
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001494static std::pair<vixl32::Condition, vixl32::Condition> GenerateTest(HCondition* condition,
1495 bool invert,
1496 CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001497 const DataType::Type type = condition->GetLeft()->GetType();
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001498 IfCondition cond = condition->GetCondition();
1499 IfCondition opposite = condition->GetOppositeCondition();
1500 std::pair<vixl32::Condition, vixl32::Condition> ret(eq, ne);
Donghui Bai426b49c2016-11-08 14:55:38 +08001501
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001502 if (invert) {
1503 std::swap(cond, opposite);
1504 }
Donghui Bai426b49c2016-11-08 14:55:38 +08001505
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001506 if (type == DataType::Type::kInt64) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001507 ret = condition->GetLocations()->InAt(1).IsConstant()
1508 ? GenerateLongTestConstant(condition, invert, codegen)
1509 : GenerateLongTest(condition, invert, codegen);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001510 } else if (DataType::IsFloatingPointType(type)) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001511 GenerateVcmp(condition, codegen);
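    // Transfer the FPSCR flags set by VCMP to the APSR so that the integer condition codes
    // computed below apply to the floating-point comparison.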
1512 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
1513 ret = std::make_pair(ARMFPCondition(cond, condition->IsGtBias()),
1514 ARMFPCondition(opposite, condition->IsGtBias()));
Donghui Bai426b49c2016-11-08 14:55:38 +08001515 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001516 DCHECK(DataType::IsIntegralType(type) || type == DataType::Type::kReference) << type;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001517 __ Cmp(InputRegisterAt(condition, 0), InputOperandAt(condition, 1));
1518 ret = std::make_pair(ARMCondition(cond), ARMCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001519 }
1520
1521 return ret;
1522}
1523
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001524static void GenerateConditionGeneric(HCondition* cond, CodeGeneratorARMVIXL* codegen) {
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001525 const vixl32::Register out = OutputRegister(cond);
1526 const auto condition = GenerateTest(cond, false, codegen);
1527
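  // Materialize the result: set it to 0 without disturbing the flags produced by
  // GenerateTest(), then conditionally overwrite it with 1 below.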
1528 __ Mov(LeaveFlags, out, 0);
1529
1530 if (out.IsLow()) {
1531 // We use the scope because of the IT block that follows.
1532 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1533 2 * vixl32::k16BitT32InstructionSizeInBytes,
1534 CodeBufferCheckScope::kExactSize);
1535
1536 __ it(condition.first);
1537 __ mov(condition.first, out, 1);
1538 } else {
1539 vixl32::Label done_label;
1540 vixl32::Label* const final_label = codegen->GetFinalLabel(cond, &done_label);
1541
Andreas Gampe3db70682018-12-26 15:12:03 -08001542 __ B(condition.second, final_label, /* is_far_target= */ false);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001543 __ Mov(out, 1);
1544
1545 if (done_label.IsReferenced()) {
1546 __ Bind(&done_label);
1547 }
1548 }
1549}
1550
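// Materializes a 64-bit EQ/NE: the two halves are subtracted separately and the differences
// ORed together, so the combined result is zero if and only if both halves are equal; that
// zero test is then turned into 0/1 via an IT block or GenerateConditionWithZero().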
1551static void GenerateEqualLong(HCondition* cond, CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001552 DCHECK_EQ(cond->GetLeft()->GetType(), DataType::Type::kInt64);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001553
1554 const LocationSummary* const locations = cond->GetLocations();
1555 IfCondition condition = cond->GetCondition();
1556 const vixl32::Register out = OutputRegister(cond);
1557 const Location left = locations->InAt(0);
1558 const Location right = locations->InAt(1);
1559 vixl32::Register left_high = HighRegisterFrom(left);
1560 vixl32::Register left_low = LowRegisterFrom(left);
1561 vixl32::Register temp;
1562 UseScratchRegisterScope temps(codegen->GetVIXLAssembler());
1563
1564 if (right.IsConstant()) {
1565 IfCondition opposite = cond->GetOppositeCondition();
1566 const int64_t value = AdjustConstantForCondition(Int64ConstantFrom(right),
1567 &condition,
1568 &opposite);
1569 Operand right_high = High32Bits(value);
1570 Operand right_low = Low32Bits(value);
1571
1572 // The output uses Location::kNoOutputOverlap.
1573 if (out.Is(left_high)) {
1574 std::swap(left_low, left_high);
1575 std::swap(right_low, right_high);
1576 }
1577
1578 __ Sub(out, left_low, right_low);
1579 temp = temps.Acquire();
1580 __ Sub(temp, left_high, right_high);
1581 } else {
1582 DCHECK(right.IsRegisterPair());
1583 temp = temps.Acquire();
1584 __ Sub(temp, left_high, HighRegisterFrom(right));
1585 __ Sub(out, left_low, LowRegisterFrom(right));
1586 }
1587
1588 // Need to check after calling AdjustConstantForCondition().
1589 DCHECK(condition == kCondEQ || condition == kCondNE) << condition;
1590
1591 if (condition == kCondNE && out.IsLow()) {
1592 __ Orrs(out, out, temp);
1593
1594 // We use the scope because of the IT block that follows.
1595 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1596 2 * vixl32::k16BitT32InstructionSizeInBytes,
1597 CodeBufferCheckScope::kExactSize);
1598
1599 __ it(ne);
1600 __ mov(ne, out, 1);
1601 } else {
1602 __ Orr(out, out, temp);
1603 codegen->GenerateConditionWithZero(condition, out, out, temp);
1604 }
1605}
1606
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001607static void GenerateConditionLong(HCondition* cond, CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001608 DCHECK_EQ(cond->GetLeft()->GetType(), DataType::Type::kInt64);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001609
1610 const LocationSummary* const locations = cond->GetLocations();
1611 IfCondition condition = cond->GetCondition();
1612 const vixl32::Register out = OutputRegister(cond);
1613 const Location left = locations->InAt(0);
1614 const Location right = locations->InAt(1);
1615
1616 if (right.IsConstant()) {
1617 IfCondition opposite = cond->GetOppositeCondition();
1618
1619 // Comparisons against 0 are common enough to deserve special attention.
1620 if (AdjustConstantForCondition(Int64ConstantFrom(right), &condition, &opposite) == 0) {
1621 switch (condition) {
1622 case kCondNE:
1623 case kCondA:
1624 if (out.IsLow()) {
1625 // We only care if both input registers are 0 or not.
1626 __ Orrs(out, LowRegisterFrom(left), HighRegisterFrom(left));
1627
1628 // We use the scope because of the IT block that follows.
1629 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1630 2 * vixl32::k16BitT32InstructionSizeInBytes,
1631 CodeBufferCheckScope::kExactSize);
1632
1633 __ it(ne);
1634 __ mov(ne, out, 1);
1635 return;
1636 }
1637
1638 FALLTHROUGH_INTENDED;
1639 case kCondEQ:
1640 case kCondBE:
1641 // We only care if both input registers are 0 or not.
1642 __ Orr(out, LowRegisterFrom(left), HighRegisterFrom(left));
1643 codegen->GenerateConditionWithZero(condition, out, out);
1644 return;
1645 case kCondLT:
1646 case kCondGE:
1647 // We only care about the sign bit.
1648 FALLTHROUGH_INTENDED;
1649 case kCondAE:
1650 case kCondB:
1651 codegen->GenerateConditionWithZero(condition, out, HighRegisterFrom(left));
1652 return;
1653 case kCondLE:
1654 case kCondGT:
1655 default:
1656 break;
1657 }
1658 }
1659 }
1660
Anton Kirilov23b752b2017-07-20 14:40:44 +01001661 // If `out` is a low register, then the GenerateConditionGeneric()
1662 // function generates a shorter code sequence that is still branchless.
1663 if ((condition == kCondEQ || condition == kCondNE) && !out.IsLow()) {
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001664 GenerateEqualLong(cond, codegen);
1665 return;
1666 }
1667
Anton Kirilov23b752b2017-07-20 14:40:44 +01001668 GenerateConditionGeneric(cond, codegen);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001669}
1670
Roland Levillain6d729a72017-06-30 18:34:01 +01001671static void GenerateConditionIntegralOrNonPrimitive(HCondition* cond,
1672 CodeGeneratorARMVIXL* codegen) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001673 const DataType::Type type = cond->GetLeft()->GetType();
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001674
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001675 DCHECK(DataType::IsIntegralType(type) || type == DataType::Type::kReference) << type;
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001676
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001677 if (type == DataType::Type::kInt64) {
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001678 GenerateConditionLong(cond, codegen);
1679 return;
1680 }
1681
1682 IfCondition condition = cond->GetCondition();
1683 vixl32::Register in = InputRegisterAt(cond, 0);
1684 const vixl32::Register out = OutputRegister(cond);
1685 const Location right = cond->GetLocations()->InAt(1);
1686 int64_t value;
1687
1688 if (right.IsConstant()) {
1689 IfCondition opposite = cond->GetOppositeCondition();
1690
1691 value = AdjustConstantForCondition(Int64ConstantFrom(right), &condition, &opposite);
1692
1693 // Comparisons against 0 are common enough to deserve special attention.
1694 if (value == 0) {
1695 switch (condition) {
1696 case kCondNE:
1697 case kCondA:
1698 if (out.IsLow() && out.Is(in)) {
1699 __ Cmp(out, 0);
1700
1701 // We use the scope because of the IT block that follows.
1702 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1703 2 * vixl32::k16BitT32InstructionSizeInBytes,
1704 CodeBufferCheckScope::kExactSize);
1705
1706 __ it(ne);
1707 __ mov(ne, out, 1);
1708 return;
1709 }
1710
1711 FALLTHROUGH_INTENDED;
1712 case kCondEQ:
1713 case kCondBE:
1714 case kCondLT:
1715 case kCondGE:
1716 case kCondAE:
1717 case kCondB:
1718 codegen->GenerateConditionWithZero(condition, out, in);
1719 return;
1720 case kCondLE:
1721 case kCondGT:
1722 default:
1723 break;
1724 }
1725 }
1726 }
1727
1728 if (condition == kCondEQ || condition == kCondNE) {
1729 Operand operand(0);
1730
1731 if (right.IsConstant()) {
1732 operand = Operand::From(value);
1733 } else if (out.Is(RegisterFrom(right))) {
1734 // Avoid 32-bit instructions if possible.
1735 operand = InputOperandAt(cond, 0);
1736 in = RegisterFrom(right);
1737 } else {
1738 operand = InputOperandAt(cond, 1);
1739 }
1740
1741 if (condition == kCondNE && out.IsLow()) {
1742 __ Subs(out, in, operand);
1743
1744 // We use the scope because of the IT block that follows.
1745 ExactAssemblyScope guard(codegen->GetVIXLAssembler(),
1746 2 * vixl32::k16BitT32InstructionSizeInBytes,
1747 CodeBufferCheckScope::kExactSize);
1748
1749 __ it(ne);
1750 __ mov(ne, out, 1);
1751 } else {
1752 __ Sub(out, in, operand);
1753 codegen->GenerateConditionWithZero(condition, out, out);
1754 }
1755
1756 return;
1757 }
1758
1759 GenerateConditionGeneric(cond, codegen);
1760}
1761
Donghui Bai426b49c2016-11-08 14:55:38 +08001762static bool CanEncodeConstantAs8BitImmediate(HConstant* constant) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001763 const DataType::Type type = constant->GetType();
Donghui Bai426b49c2016-11-08 14:55:38 +08001764 bool ret = false;
1765
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001766 DCHECK(DataType::IsIntegralType(type) || type == DataType::Type::kReference) << type;
Donghui Bai426b49c2016-11-08 14:55:38 +08001767
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001768 if (type == DataType::Type::kInt64) {
Donghui Bai426b49c2016-11-08 14:55:38 +08001769 const uint64_t value = Uint64ConstantFrom(constant);
1770
1771 ret = IsUint<8>(Low32Bits(value)) && IsUint<8>(High32Bits(value));
1772 } else {
1773 ret = IsUint<8>(Int32ConstantFrom(constant));
1774 }
1775
1776 return ret;
1777}
1778
1779static Location Arm8BitEncodableConstantOrRegister(HInstruction* constant) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001780 DCHECK(!DataType::IsFloatingPointType(constant->GetType()));
Donghui Bai426b49c2016-11-08 14:55:38 +08001781
1782 if (constant->IsConstant() && CanEncodeConstantAs8BitImmediate(constant->AsConstant())) {
1783 return Location::ConstantLocation(constant->AsConstant());
1784 }
1785
1786 return Location::RequiresRegister();
1787}
1788
1789static bool CanGenerateConditionalMove(const Location& out, const Location& src) {
1790 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
1791 // we check that we are not dealing with floating-point output (there is no
1792 // 16-bit VMOV encoding).
1793 if (!out.IsRegister() && !out.IsRegisterPair()) {
1794 return false;
1795 }
1796
1797 // For constants, we also check that the output is in one or two low registers,
1798 // and that the constants fit in an 8-bit unsigned integer, so that a 16-bit
1799 // MOV encoding can be used.
1800 if (src.IsConstant()) {
1801 if (!CanEncodeConstantAs8BitImmediate(src.GetConstant())) {
1802 return false;
1803 }
1804
1805 if (out.IsRegister()) {
1806 if (!RegisterFrom(out).IsLow()) {
1807 return false;
1808 }
1809 } else {
1810 DCHECK(out.IsRegisterPair());
1811
1812 if (!HighRegisterFrom(out).IsLow()) {
1813 return false;
1814 }
1815 }
1816 }
1817
1818 return true;
1819}
1820
Scott Wakelingfe885462016-09-22 10:24:38 +01001821#undef __
1822
Donghui Bai426b49c2016-11-08 14:55:38 +08001823vixl32::Label* CodeGeneratorARMVIXL::GetFinalLabel(HInstruction* instruction,
1824 vixl32::Label* final_label) {
1825 DCHECK(!instruction->IsControlFlow() && !instruction->IsSuspendCheck());
Anton Kirilov6f644202017-02-27 18:29:45 +00001826 DCHECK(!instruction->IsInvoke() || !instruction->GetLocations()->CanCall());
Donghui Bai426b49c2016-11-08 14:55:38 +08001827
1828 const HBasicBlock* const block = instruction->GetBlock();
1829 const HLoopInformation* const info = block->GetLoopInformation();
1830 HInstruction* const next = instruction->GetNext();
1831
1832 // Avoid a branch to a branch.
1833 if (next->IsGoto() && (info == nullptr ||
1834 !info->IsBackEdge(*block) ||
1835 !info->HasSuspendCheck())) {
1836 final_label = GetLabelOf(next->AsGoto()->GetSuccessor());
1837 }
1838
1839 return final_label;
1840}
1841
Scott Wakelingfe885462016-09-22 10:24:38 +01001842CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
Scott Wakelingfe885462016-09-22 10:24:38 +01001843 const CompilerOptions& compiler_options,
1844 OptimizingCompilerStats* stats)
1845 : CodeGenerator(graph,
1846 kNumberOfCoreRegisters,
1847 kNumberOfSRegisters,
1848 kNumberOfRegisterPairs,
1849 kCoreCalleeSaves.GetList(),
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001850 ComputeSRegisterListMask(kFpuCalleeSaves),
Scott Wakelingfe885462016-09-22 10:24:38 +01001851 compiler_options,
1852 stats),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001853 block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1854 jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Scott Wakelingfe885462016-09-22 10:24:38 +01001855 location_builder_(graph, this),
1856 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001857 move_resolver_(graph->GetAllocator(), this),
1858 assembler_(graph->GetAllocator()),
Artem Serovc5fcb442016-12-02 19:19:58 +00001859 uint32_literals_(std::less<uint32_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001860 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001861 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001862 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001863 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001864 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001865 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001866 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko6fd16062018-06-26 11:02:04 +01001867 boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001868 baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Artem Serovc5fcb442016-12-02 19:19:58 +00001869 jit_string_patches_(StringReferenceValueComparator(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001870 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Artem Serovc5fcb442016-12-02 19:19:58 +00001871 jit_class_patches_(TypeReferenceValueComparator(),
Vladimir Marko966b46f2018-08-03 10:20:19 +00001872 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1873 jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
1874 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Scott Wakelingfe885462016-09-22 10:24:38 +01001875 // Always save the LR register to mimic Quick.
1876 AddAllocatedRegister(Location::RegisterLocation(LR));
Nicolas Geoffray13a797b2017-03-15 16:41:31 +00001877  // Give D30 and D31 as scratch registers to VIXL. The register allocator only works on
1878 // S0-S31, which alias to D0-D15.
1879 GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d31);
1880 GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d30);
Scott Wakelingfe885462016-09-22 10:24:38 +01001881}
1882
Artem Serov551b28f2016-10-18 19:11:30 +01001883void JumpTableARMVIXL::EmitTable(CodeGeneratorARMVIXL* codegen) {
1884 uint32_t num_entries = switch_instr_->GetNumEntries();
1885 DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);
1886
1887 // We are about to use the assembler to place literals directly. Make sure we have enough
Scott Wakelingb77051e2016-11-21 19:46:00 +00001888  // underlying code buffer space and that we have generated a jump table of the right size,
1889  // using codegen->GetVIXLAssembler()->GetBuffer().Align();
Artem Serov0fb37192016-12-06 18:13:40 +00001890 ExactAssemblyScope aas(codegen->GetVIXLAssembler(),
1891 num_entries * sizeof(int32_t),
1892 CodeBufferCheckScope::kMaximumSize);
Artem Serov551b28f2016-10-18 19:11:30 +01001893 // TODO(VIXL): Check that using lower case bind is fine here.
1894 codegen->GetVIXLAssembler()->bind(&table_start_);
Artem Serov09a940d2016-11-11 16:15:11 +00001895 for (uint32_t i = 0; i < num_entries; i++) {
1896 codegen->GetVIXLAssembler()->place(bb_addresses_[i].get());
1897 }
1898}
1899
1900void JumpTableARMVIXL::FixTable(CodeGeneratorARMVIXL* codegen) {
1901 uint32_t num_entries = switch_instr_->GetNumEntries();
1902 DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);
1903
Artem Serov551b28f2016-10-18 19:11:30 +01001904 const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
1905 for (uint32_t i = 0; i < num_entries; i++) {
1906 vixl32::Label* target_label = codegen->GetLabelOf(successors[i]);
1907 DCHECK(target_label->IsBound());
1908 int32_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
1909      // When doing a BX to an address in T32, the lowest bit must be set to 1 to stay in Thumb state.
1910 if (codegen->GetVIXLAssembler()->IsUsingT32()) {
1911 jump_offset++;
1912 }
1913 DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
1914 DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
Artem Serov09a940d2016-11-11 16:15:11 +00001915
Scott Wakelingb77051e2016-11-21 19:46:00 +00001916 bb_addresses_[i].get()->UpdateValue(jump_offset, codegen->GetVIXLAssembler()->GetBuffer());
Artem Serov551b28f2016-10-18 19:11:30 +01001917 }
1918}
1919
Artem Serov09a940d2016-11-11 16:15:11 +00001920void CodeGeneratorARMVIXL::FixJumpTables() {
Artem Serov551b28f2016-10-18 19:11:30 +01001921 for (auto&& jump_table : jump_tables_) {
Artem Serov09a940d2016-11-11 16:15:11 +00001922 jump_table->FixTable(this);
Artem Serov551b28f2016-10-18 19:11:30 +01001923 }
1924}
1925
Andreas Gampeca620d72016-11-08 08:09:33 -08001926#define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()-> // NOLINT
Scott Wakelingfe885462016-09-22 10:24:38 +01001927
1928void CodeGeneratorARMVIXL::Finalize(CodeAllocator* allocator) {
Artem Serov09a940d2016-11-11 16:15:11 +00001929 FixJumpTables();
Vladimir Marko966b46f2018-08-03 10:20:19 +00001930
1931 // Emit JIT baker read barrier slow paths.
1932 DCHECK(Runtime::Current()->UseJitCompilation() || jit_baker_read_barrier_slow_paths_.empty());
1933 for (auto& entry : jit_baker_read_barrier_slow_paths_) {
1934 uint32_t encoded_data = entry.first;
1935 vixl::aarch32::Label* slow_path_entry = &entry.second.label;
1936 __ Bind(slow_path_entry);
Andreas Gampe3db70682018-12-26 15:12:03 -08001937 CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name= */ nullptr);
Vladimir Marko966b46f2018-08-03 10:20:19 +00001938 }
1939
Scott Wakelingfe885462016-09-22 10:24:38 +01001940 GetAssembler()->FinalizeCode();
1941 CodeGenerator::Finalize(allocator);
Vladimir Markoca1e0382018-04-11 09:58:41 +00001942
1943 // Verify Baker read barrier linker patches.
1944 if (kIsDebugBuild) {
1945 ArrayRef<const uint8_t> code = allocator->GetMemory();
1946 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
1947 DCHECK(info.label.IsBound());
1948 uint32_t literal_offset = info.label.GetLocation();
1949 DCHECK_ALIGNED(literal_offset, 2u);
1950
1951 auto GetInsn16 = [&code](uint32_t offset) {
1952 DCHECK_ALIGNED(offset, 2u);
1953 return (static_cast<uint32_t>(code[offset + 0]) << 0) +
1954 (static_cast<uint32_t>(code[offset + 1]) << 8);
1955 };
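      // A 32-bit Thumb-2 instruction is encoded as two 16-bit halfwords, each stored
      // little-endian, with the first (most significant) halfword at the lower address.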
1956 auto GetInsn32 = [=](uint32_t offset) {
1957 return (GetInsn16(offset) << 16) + (GetInsn16(offset + 2u) << 0);
1958 };
1959
1960 uint32_t encoded_data = info.custom_data;
1961 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
1962 // Check that the next instruction matches the expected LDR.
1963 switch (kind) {
1964 case BakerReadBarrierKind::kField: {
1965 BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
1966 if (width == BakerReadBarrierWidth::kWide) {
1967 DCHECK_GE(code.size() - literal_offset, 8u);
1968 uint32_t next_insn = GetInsn32(literal_offset + 4u);
1969 // LDR (immediate), encoding T3, with correct base_reg.
1970 CheckValidReg((next_insn >> 12) & 0xfu); // Check destination register.
1971 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1972 CHECK_EQ(next_insn & 0xffff0000u, 0xf8d00000u | (base_reg << 16));
1973 } else {
1974 DCHECK_GE(code.size() - literal_offset, 6u);
1975 uint32_t next_insn = GetInsn16(literal_offset + 4u);
1976 // LDR (immediate), encoding T1, with correct base_reg.
1977 CheckValidReg(next_insn & 0x7u); // Check destination register.
1978 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1979 CHECK_EQ(next_insn & 0xf838u, 0x6800u | (base_reg << 3));
1980 }
1981 break;
1982 }
1983 case BakerReadBarrierKind::kArray: {
1984 DCHECK_GE(code.size() - literal_offset, 8u);
1985 uint32_t next_insn = GetInsn32(literal_offset + 4u);
1986 // LDR (register) with correct base_reg, S=1 and option=011 (LDR Wt, [Xn, Xm, LSL #2]).
1987 CheckValidReg((next_insn >> 12) & 0xfu); // Check destination register.
1988 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1989 CHECK_EQ(next_insn & 0xffff0ff0u, 0xf8500020u | (base_reg << 16));
1990 CheckValidReg(next_insn & 0xf); // Check index register
1991 break;
1992 }
1993 case BakerReadBarrierKind::kGcRoot: {
1994 BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
1995 if (width == BakerReadBarrierWidth::kWide) {
1996 DCHECK_GE(literal_offset, 4u);
1997 uint32_t prev_insn = GetInsn32(literal_offset - 4u);
Vladimir Markof28be432018-08-14 12:20:51 +00001998 // LDR (immediate), encoding T3, with correct root_reg.
Vladimir Markoca1e0382018-04-11 09:58:41 +00001999 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
Vladimir Markof28be432018-08-14 12:20:51 +00002000 CHECK_EQ(prev_insn & 0xfff0f000u, 0xf8d00000u | (root_reg << 12));
Vladimir Markoca1e0382018-04-11 09:58:41 +00002001 } else {
2002 DCHECK_GE(literal_offset, 2u);
2003 uint32_t prev_insn = GetInsn16(literal_offset - 2u);
2004 // LDR (immediate), encoding T1, with correct root_reg.
2005 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
2006 CHECK_EQ(prev_insn & 0xf807u, 0x6800u | root_reg);
2007 }
2008 break;
2009 }
Vladimir Markod887ed82018-08-14 13:52:12 +00002010 case BakerReadBarrierKind::kUnsafeCas: {
2011 DCHECK_GE(literal_offset, 4u);
2012 uint32_t prev_insn = GetInsn32(literal_offset - 4u);
2013 // ADD (register), encoding T3, with correct root_reg.
2014 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
2015 CHECK_EQ(prev_insn & 0xfff0fff0u, 0xeb000000u | (root_reg << 8));
2016 break;
2017 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00002018 default:
2019 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
2020 UNREACHABLE();
2021 }
2022 }
2023 }
Scott Wakelingfe885462016-09-22 10:24:38 +01002024}
2025
2026void CodeGeneratorARMVIXL::SetupBlockedRegisters() const {
Scott Wakelingfe885462016-09-22 10:24:38 +01002027 // Stack register, LR and PC are always reserved.
2028 blocked_core_registers_[SP] = true;
2029 blocked_core_registers_[LR] = true;
2030 blocked_core_registers_[PC] = true;
2031
Roland Levillain6d729a72017-06-30 18:34:01 +01002032 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2033 // Reserve marking register.
2034 blocked_core_registers_[MR] = true;
2035 }
2036
Scott Wakelingfe885462016-09-22 10:24:38 +01002037 // Reserve thread register.
2038 blocked_core_registers_[TR] = true;
2039
2040 // Reserve temp register.
2041 blocked_core_registers_[IP] = true;
2042
2043 if (GetGraph()->IsDebuggable()) {
2044 // Stubs do not save callee-save floating point registers. If the graph
2045 // is debuggable, we need to deal with these registers differently. For
2046 // now, just block them.
2047 for (uint32_t i = kFpuCalleeSaves.GetFirstSRegister().GetCode();
2048 i <= kFpuCalleeSaves.GetLastSRegister().GetCode();
2049 ++i) {
2050 blocked_fpu_registers_[i] = true;
2051 }
2052 }
Scott Wakelingfe885462016-09-22 10:24:38 +01002053}
2054
Scott Wakelingfe885462016-09-22 10:24:38 +01002055InstructionCodeGeneratorARMVIXL::InstructionCodeGeneratorARMVIXL(HGraph* graph,
2056 CodeGeneratorARMVIXL* codegen)
2057 : InstructionCodeGenerator(graph, codegen),
2058 assembler_(codegen->GetAssembler()),
2059 codegen_(codegen) {}
2060
2061void CodeGeneratorARMVIXL::ComputeSpillMask() {
2062 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
2063 DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
2064 // There is no easy instruction to restore just the PC on thumb2. We spill and
2065 // restore another arbitrary register.
2066 core_spill_mask_ |= (1 << kCoreAlwaysSpillRegister.GetCode());
2067 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
2068 // We use vpush and vpop for saving and restoring floating point registers, which take
2069 // a SRegister and the number of registers to save/restore after that SRegister. We
2070 // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
2071 // but in the range.
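  // For example, if only s16 and s19 are allocated, s17 and s18 are added to the mask so that
  // a single VPUSH {s16-s19} / VPOP {s16-s19} pair covers the whole range.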
2072 if (fpu_spill_mask_ != 0) {
2073 uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
2074 uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
2075 for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
2076 fpu_spill_mask_ |= (1 << i);
2077 }
2078 }
2079}
2080
2081void CodeGeneratorARMVIXL::GenerateFrameEntry() {
2082 bool skip_overflow_check =
2083 IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
2084 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
2085 __ Bind(&frame_entry_label_);
2086
Nicolas Geoffray8d728322018-01-18 22:44:32 +00002087 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
2088 UseScratchRegisterScope temps(GetVIXLAssembler());
2089 vixl32::Register temp = temps.Acquire();
2090 __ Ldrh(temp, MemOperand(kMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
2091 __ Add(temp, temp, 1);
2092 __ Strh(temp, MemOperand(kMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
2093 }
2094
Scott Wakelingfe885462016-09-22 10:24:38 +01002095 if (HasEmptyFrame()) {
David Srbecky30021842019-02-13 14:19:36 +00002096 // Ensure that the CFI opcode list is not empty.
2097 GetAssembler()->cfi().Nop();
Scott Wakelingfe885462016-09-22 10:24:38 +01002098 return;
2099 }
2100
Scott Wakelingfe885462016-09-22 10:24:38 +01002101 if (!skip_overflow_check) {
xueliang.zhong10049552018-01-31 17:10:36 +00002102 // Using r4 instead of IP saves 2 bytes.
Nicolas Geoffray1a4f3ca2018-01-25 14:07:15 +00002103 UseScratchRegisterScope temps(GetVIXLAssembler());
xueliang.zhong10049552018-01-31 17:10:36 +00002104 vixl32::Register temp;
2105 // TODO: Remove this check when R4 is made a callee-save register
2106 // in ART compiled code (b/72801708). Currently we need to make
2107 // sure r4 is not blocked, e.g. in special purpose
2108 // TestCodeGeneratorARMVIXL; also asserting that r4 is available
2109 // here.
2110 if (!blocked_core_registers_[R4]) {
2111 for (vixl32::Register reg : kParameterCoreRegistersVIXL) {
2112 DCHECK(!reg.Is(r4));
2113 }
2114 DCHECK(!kCoreCalleeSaves.Includes(r4));
2115 temp = r4;
2116 } else {
2117 temp = temps.Acquire();
2118 }
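    // Implicit stack overflow check: probe the address GetStackOverflowReservedBytes() bytes
    // below SP. If the stack cannot grow that far, the load faults and the fault handler turns
    // the fault into a StackOverflowError, using the PC info recorded below.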
Vladimir Marko33bff252017-11-01 14:35:42 +00002119 __ Sub(temp, sp, Operand::From(GetStackOverflowReservedBytes(InstructionSet::kArm)));
Scott Wakelingfe885462016-09-22 10:24:38 +01002120 // The load must immediately precede RecordPcInfo.
Artem Serov0fb37192016-12-06 18:13:40 +00002121 ExactAssemblyScope aas(GetVIXLAssembler(),
2122 vixl32::kMaxInstructionSizeInBytes,
2123 CodeBufferCheckScope::kMaximumSize);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002124 __ ldr(temp, MemOperand(temp));
2125 RecordPcInfo(nullptr, 0);
Scott Wakelingfe885462016-09-22 10:24:38 +01002126 }
2127
2128 __ Push(RegisterList(core_spill_mask_));
2129 GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(core_spill_mask_));
2130 GetAssembler()->cfi().RelOffsetForMany(DWARFReg(kMethodRegister),
2131 0,
2132 core_spill_mask_,
2133 kArmWordSize);
2134 if (fpu_spill_mask_ != 0) {
2135 uint32_t first = LeastSignificantBit(fpu_spill_mask_);
2136
2137 // Check that list is contiguous.
2138 DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));
2139
2140 __ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
2141 GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002142 GetAssembler()->cfi().RelOffsetForMany(DWARFReg(s0), 0, fpu_spill_mask_, kArmWordSize);
Scott Wakelingfe885462016-09-22 10:24:38 +01002143 }
Scott Wakelingbffdc702016-12-07 17:46:03 +00002144
Scott Wakelingfe885462016-09-22 10:24:38 +01002145 int adjust = GetFrameSize() - FrameEntrySpillSize();
2146 __ Sub(sp, sp, adjust);
2147 GetAssembler()->cfi().AdjustCFAOffset(adjust);
Scott Wakelingbffdc702016-12-07 17:46:03 +00002148
2149 // Save the current method if we need it. Note that we do not
2150 // do this in HCurrentMethod, as the instruction might have been removed
2151 // in the SSA graph.
2152 if (RequiresCurrentMethod()) {
2153 GetAssembler()->StoreToOffset(kStoreWord, kMethodRegister, sp, 0);
2154 }
Nicolas Geoffrayf7893532017-06-15 12:34:36 +01002155
2156 if (GetGraph()->HasShouldDeoptimizeFlag()) {
2157 UseScratchRegisterScope temps(GetVIXLAssembler());
2158 vixl32::Register temp = temps.Acquire();
2159 // Initialize should_deoptimize flag to 0.
2160 __ Mov(temp, 0);
2161 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, GetStackOffsetOfShouldDeoptimizeFlag());
2162 }
Roland Levillain5daa4952017-07-03 17:23:56 +01002163
Andreas Gampe3db70682018-12-26 15:12:03 -08002164 MaybeGenerateMarkingRegisterCheck(/* code= */ 1);
Scott Wakelingfe885462016-09-22 10:24:38 +01002165}
2166
2167void CodeGeneratorARMVIXL::GenerateFrameExit() {
2168 if (HasEmptyFrame()) {
2169 __ Bx(lr);
2170 return;
2171 }
2172 GetAssembler()->cfi().RememberState();
2173 int adjust = GetFrameSize() - FrameEntrySpillSize();
2174 __ Add(sp, sp, adjust);
2175 GetAssembler()->cfi().AdjustCFAOffset(-adjust);
2176 if (fpu_spill_mask_ != 0) {
2177 uint32_t first = LeastSignificantBit(fpu_spill_mask_);
2178
2179 // Check that list is contiguous.
2180 DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));
2181
2182 __ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
2183 GetAssembler()->cfi().AdjustCFAOffset(
2184 -static_cast<int>(kArmWordSize) * POPCOUNT(fpu_spill_mask_));
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002185 GetAssembler()->cfi().RestoreMany(DWARFReg(vixl32::SRegister(0)), fpu_spill_mask_);
Scott Wakelingfe885462016-09-22 10:24:38 +01002186 }
2187 // Pop LR into PC to return.
2188 DCHECK_NE(core_spill_mask_ & (1 << kLrCode), 0U);
2189 uint32_t pop_mask = (core_spill_mask_ & (~(1 << kLrCode))) | 1 << kPcCode;
2190 __ Pop(RegisterList(pop_mask));
2191 GetAssembler()->cfi().RestoreState();
2192 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
2193}
2194
2195void CodeGeneratorARMVIXL::Bind(HBasicBlock* block) {
2196 __ Bind(GetLabelOf(block));
2197}
2198
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002199Location InvokeDexCallingConventionVisitorARMVIXL::GetNextLocation(DataType::Type type) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002200 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002201 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002202 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002203 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002204 case DataType::Type::kInt8:
2205 case DataType::Type::kUint16:
2206 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002207 case DataType::Type::kInt32: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002208 uint32_t index = gp_index_++;
2209 uint32_t stack_index = stack_index_++;
2210 if (index < calling_convention.GetNumberOfRegisters()) {
2211 return LocationFrom(calling_convention.GetRegisterAt(index));
2212 } else {
2213 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
2214 }
2215 }
2216
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002217 case DataType::Type::kInt64: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002218 uint32_t index = gp_index_;
2219 uint32_t stack_index = stack_index_;
2220 gp_index_ += 2;
2221 stack_index_ += 2;
2222 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
2223 if (calling_convention.GetRegisterAt(index).Is(r1)) {
2224 // Skip R1, and use R2_R3 instead.
2225 gp_index_++;
2226 index++;
2227 }
2228 }
2229 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
2230 DCHECK_EQ(calling_convention.GetRegisterAt(index).GetCode() + 1,
2231 calling_convention.GetRegisterAt(index + 1).GetCode());
2232
2233 return LocationFrom(calling_convention.GetRegisterAt(index),
2234 calling_convention.GetRegisterAt(index + 1));
2235 } else {
2236 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
2237 }
2238 }
2239
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002240 case DataType::Type::kFloat32: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002241 uint32_t stack_index = stack_index_++;
2242 if (float_index_ % 2 == 0) {
2243 float_index_ = std::max(double_index_, float_index_);
2244 }
2245 if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
2246 return LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
2247 } else {
2248 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
2249 }
2250 }
2251
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002252 case DataType::Type::kFloat64: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002253 double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
2254 uint32_t stack_index = stack_index_;
2255 stack_index_ += 2;
2256 if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
2257 uint32_t index = double_index_;
2258 double_index_ += 2;
2259 Location result = LocationFrom(
2260 calling_convention.GetFpuRegisterAt(index),
2261 calling_convention.GetFpuRegisterAt(index + 1));
2262 DCHECK(ExpectedPairLayout(result));
2263 return result;
2264 } else {
2265 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
2266 }
2267 }
2268
Aart Bik66c158e2018-01-31 12:55:04 -08002269 case DataType::Type::kUint32:
2270 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002271 case DataType::Type::kVoid:
Artem Serovd4cc5b22016-11-04 11:19:09 +00002272 LOG(FATAL) << "Unexpected parameter type " << type;
Elliott Hughesc1896c92018-11-29 11:33:18 -08002273 UNREACHABLE();
Artem Serovd4cc5b22016-11-04 11:19:09 +00002274 }
2275 return Location::NoLocation();
2276}
2277
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002278Location InvokeDexCallingConventionVisitorARMVIXL::GetReturnLocation(DataType::Type type) const {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002279 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002280 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002281 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002282 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002283 case DataType::Type::kInt8:
2284 case DataType::Type::kUint16:
2285 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -08002286 case DataType::Type::kUint32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002287 case DataType::Type::kInt32: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002288 return LocationFrom(r0);
2289 }
2290
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002291 case DataType::Type::kFloat32: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002292 return LocationFrom(s0);
2293 }
2294
Aart Bik66c158e2018-01-31 12:55:04 -08002295 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002296 case DataType::Type::kInt64: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002297 return LocationFrom(r0, r1);
2298 }
2299
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002300 case DataType::Type::kFloat64: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00002301 return LocationFrom(s0, s1);
2302 }
2303
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002304 case DataType::Type::kVoid:
Artem Serovd4cc5b22016-11-04 11:19:09 +00002305 return Location::NoLocation();
2306 }
2307
2308 UNREACHABLE();
2309}
2310
2311Location InvokeDexCallingConventionVisitorARMVIXL::GetMethodLocation() const {
2312 return LocationFrom(kMethodRegister);
2313}
2314
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002315void CodeGeneratorARMVIXL::Move32(Location destination, Location source) {
2316 if (source.Equals(destination)) {
2317 return;
2318 }
2319 if (destination.IsRegister()) {
2320 if (source.IsRegister()) {
2321 __ Mov(RegisterFrom(destination), RegisterFrom(source));
2322 } else if (source.IsFpuRegister()) {
2323 __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
2324 } else {
2325 GetAssembler()->LoadFromOffset(kLoadWord,
2326 RegisterFrom(destination),
2327 sp,
2328 source.GetStackIndex());
2329 }
2330 } else if (destination.IsFpuRegister()) {
2331 if (source.IsRegister()) {
2332 __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
2333 } else if (source.IsFpuRegister()) {
2334 __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
2335 } else {
2336 GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
2337 }
2338 } else {
2339 DCHECK(destination.IsStackSlot()) << destination;
2340 if (source.IsRegister()) {
2341 GetAssembler()->StoreToOffset(kStoreWord,
2342 RegisterFrom(source),
2343 sp,
2344 destination.GetStackIndex());
2345 } else if (source.IsFpuRegister()) {
2346 GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
2347 } else {
2348 DCHECK(source.IsStackSlot()) << source;
2349 UseScratchRegisterScope temps(GetVIXLAssembler());
2350 vixl32::Register temp = temps.Acquire();
2351 GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
2352 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
2353 }
2354 }
2355}
2356
Artem Serovcfbe9132016-10-14 15:58:56 +01002357void CodeGeneratorARMVIXL::MoveConstant(Location location, int32_t value) {
2358 DCHECK(location.IsRegister());
2359 __ Mov(RegisterFrom(location), value);
Scott Wakelingfe885462016-09-22 10:24:38 +01002360}
2361
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002362void CodeGeneratorARMVIXL::MoveLocation(Location dst, Location src, DataType::Type dst_type) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002363 // TODO(VIXL): Maybe refactor to have the 'move' implementation here and use it in
2364 // `ParallelMoveResolverARMVIXL::EmitMove`, as is done in the `arm64` backend.
Vladimir Markoca6fff82017-10-03 14:49:14 +01002365 HParallelMove move(GetGraph()->GetAllocator());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002366 move.AddMove(src, dst, dst_type, nullptr);
2367 GetMoveResolver()->EmitNativeCode(&move);
Scott Wakelingfe885462016-09-22 10:24:38 +01002368}
2369
Artem Serovcfbe9132016-10-14 15:58:56 +01002370void CodeGeneratorARMVIXL::AddLocationAsTemp(Location location, LocationSummary* locations) {
2371 if (location.IsRegister()) {
2372 locations->AddTemp(location);
2373 } else if (location.IsRegisterPair()) {
2374 locations->AddTemp(LocationFrom(LowRegisterFrom(location)));
2375 locations->AddTemp(LocationFrom(HighRegisterFrom(location)));
2376 } else {
2377 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
2378 }
Scott Wakelingfe885462016-09-22 10:24:38 +01002379}
2380
2381void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
2382 HInstruction* instruction,
2383 uint32_t dex_pc,
2384 SlowPathCode* slow_path) {
2385 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Alexandre Rames374ddf32016-11-04 10:40:49 +00002386 __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArmPointerSize>(entrypoint).Int32Value()));
2387 // Ensure the pc position is recorded immediately after the `blx` instruction.
2388  // blx in T32 has only a 16-bit encoding, which is why a stricter (exact-size) scope check is used.
Artem Serov0fb37192016-12-06 18:13:40 +00002389 ExactAssemblyScope aas(GetVIXLAssembler(),
2390 vixl32::k16BitT32InstructionSizeInBytes,
2391 CodeBufferCheckScope::kExactSize);
Alexandre Rames374ddf32016-11-04 10:40:49 +00002392 __ blx(lr);
Scott Wakelingfe885462016-09-22 10:24:38 +01002393 if (EntrypointRequiresStackMap(entrypoint)) {
2394 RecordPcInfo(instruction, dex_pc, slow_path);
2395 }
2396}
2397
2398void CodeGeneratorARMVIXL::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
2399 HInstruction* instruction,
2400 SlowPathCode* slow_path) {
2401 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Alexandre Rames374ddf32016-11-04 10:40:49 +00002402 __ Ldr(lr, MemOperand(tr, entry_point_offset));
Scott Wakelingfe885462016-09-22 10:24:38 +01002403 __ Blx(lr);
2404}
2405
Scott Wakelingfe885462016-09-22 10:24:38 +01002406void InstructionCodeGeneratorARMVIXL::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08002407 if (successor->IsExitBlock()) {
2408 DCHECK(got->GetPrevious()->AlwaysThrows());
2409 return; // no code needed
2410 }
2411
Scott Wakelingfe885462016-09-22 10:24:38 +01002412 HBasicBlock* block = got->GetBlock();
2413 HInstruction* previous = got->GetPrevious();
2414 HLoopInformation* info = block->GetLoopInformation();
2415
2416 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00002417 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
2418 UseScratchRegisterScope temps(GetVIXLAssembler());
2419 vixl32::Register temp = temps.Acquire();
2420 __ Push(vixl32::Register(kMethodRegister));
2421 GetAssembler()->LoadFromOffset(kLoadWord, kMethodRegister, sp, kArmWordSize);
2422 __ Ldrh(temp, MemOperand(kMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
2423 __ Add(temp, temp, 1);
2424 __ Strh(temp, MemOperand(kMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
2425 __ Pop(vixl32::Register(kMethodRegister));
2426 }
Scott Wakelingfe885462016-09-22 10:24:38 +01002427 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2428 return;
2429 }
2430 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2431 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
Andreas Gampe3db70682018-12-26 15:12:03 -08002432 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 2);
Scott Wakelingfe885462016-09-22 10:24:38 +01002433 }
2434 if (!codegen_->GoesToNextBlock(block, successor)) {
2435 __ B(codegen_->GetLabelOf(successor));
2436 }
2437}
2438
2439void LocationsBuilderARMVIXL::VisitGoto(HGoto* got) {
2440 got->SetLocations(nullptr);
2441}
2442
2443void InstructionCodeGeneratorARMVIXL::VisitGoto(HGoto* got) {
2444 HandleGoto(got, got->GetSuccessor());
2445}
2446
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002447void LocationsBuilderARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
2448 try_boundary->SetLocations(nullptr);
2449}
2450
2451void InstructionCodeGeneratorARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
2452 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2453 if (!successor->IsExitBlock()) {
2454 HandleGoto(try_boundary, successor);
2455 }
2456}
2457
Scott Wakelingfe885462016-09-22 10:24:38 +01002458void LocationsBuilderARMVIXL::VisitExit(HExit* exit) {
2459 exit->SetLocations(nullptr);
2460}
2461
2462void InstructionCodeGeneratorARMVIXL::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
2463}
2464
Scott Wakelingfe885462016-09-22 10:24:38 +01002465void InstructionCodeGeneratorARMVIXL::GenerateCompareTestAndBranch(HCondition* condition,
Anton Kirilov23b752b2017-07-20 14:40:44 +01002466 vixl32::Label* true_target,
2467 vixl32::Label* false_target,
Anton Kirilovfd522532017-05-10 12:46:57 +01002468 bool is_far_target) {
Anton Kirilov23b752b2017-07-20 14:40:44 +01002469 if (true_target == false_target) {
2470 DCHECK(true_target != nullptr);
2471 __ B(true_target);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002472 return;
2473 }
2474
Anton Kirilov23b752b2017-07-20 14:40:44 +01002475 vixl32::Label* non_fallthrough_target;
2476 bool invert;
2477 bool emit_both_branches;
Scott Wakelingfe885462016-09-22 10:24:38 +01002478
Anton Kirilov23b752b2017-07-20 14:40:44 +01002479 if (true_target == nullptr) {
2480 // The true target is fallthrough.
2481 DCHECK(false_target != nullptr);
2482 non_fallthrough_target = false_target;
2483 invert = true;
2484 emit_both_branches = false;
2485 } else {
2486 non_fallthrough_target = true_target;
2487 invert = false;
2488 // Either the false target is fallthrough, or there is no fallthrough
2489 // and both branches must be emitted.
2490 emit_both_branches = (false_target != nullptr);
Scott Wakelingfe885462016-09-22 10:24:38 +01002491 }
2492
Anton Kirilov23b752b2017-07-20 14:40:44 +01002493 const auto cond = GenerateTest(condition, invert, codegen_);
2494
2495 __ B(cond.first, non_fallthrough_target, is_far_target);
2496
2497 if (emit_both_branches) {
2498 // No target falls through, we need to branch.
2499 __ B(false_target);
Scott Wakelingfe885462016-09-22 10:24:38 +01002500 }
2501}
2502
2503void InstructionCodeGeneratorARMVIXL::GenerateTestAndBranch(HInstruction* instruction,
2504 size_t condition_input_index,
2505 vixl32::Label* true_target,
xueliang.zhongf51bc622016-11-04 09:23:32 +00002506 vixl32::Label* false_target,
2507 bool far_target) {
Scott Wakelingfe885462016-09-22 10:24:38 +01002508 HInstruction* cond = instruction->InputAt(condition_input_index);
2509
2510 if (true_target == nullptr && false_target == nullptr) {
2511 // Nothing to do. The code always falls through.
2512 return;
2513 } else if (cond->IsIntConstant()) {
2514 // Constant condition, statically compared against "true" (integer value 1).
2515 if (cond->AsIntConstant()->IsTrue()) {
2516 if (true_target != nullptr) {
2517 __ B(true_target);
2518 }
2519 } else {
Anton Kirilov644032c2016-12-06 17:51:43 +00002520 DCHECK(cond->AsIntConstant()->IsFalse()) << Int32ConstantFrom(cond);
Scott Wakelingfe885462016-09-22 10:24:38 +01002521 if (false_target != nullptr) {
2522 __ B(false_target);
2523 }
2524 }
2525 return;
2526 }
2527
2528 // The following code generates these patterns:
2529 // (1) true_target == nullptr && false_target != nullptr
2530 // - opposite condition true => branch to false_target
2531 // (2) true_target != nullptr && false_target == nullptr
2532 // - condition true => branch to true_target
2533 // (3) true_target != nullptr && false_target != nullptr
2534 // - condition true => branch to true_target
2535 // - branch to false_target
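  // For instance, for pattern (1) with an integral condition `a == b`, the emitted code is
  // conceptually (illustrative only):
  //     cmp  a, b
  //     bne  false_target   ; opposite condition
  //     ...                 ; fall through to the true block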
2536 if (IsBooleanValueOrMaterializedCondition(cond)) {
2537 // Condition has been materialized, compare the output to 0.
2538 if (kIsDebugBuild) {
2539 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
2540 DCHECK(cond_val.IsRegister());
2541 }
2542 if (true_target == nullptr) {
xueliang.zhongf51bc622016-11-04 09:23:32 +00002543 __ CompareAndBranchIfZero(InputRegisterAt(instruction, condition_input_index),
2544 false_target,
2545 far_target);
Scott Wakelingfe885462016-09-22 10:24:38 +01002546 } else {
xueliang.zhongf51bc622016-11-04 09:23:32 +00002547 __ CompareAndBranchIfNonZero(InputRegisterAt(instruction, condition_input_index),
2548 true_target,
2549 far_target);
Scott Wakelingfe885462016-09-22 10:24:38 +01002550 }
2551 } else {
2552 // Condition has not been materialized. Use its inputs as the comparison and
2553 // its condition as the branch condition.
2554 HCondition* condition = cond->AsCondition();
2555
2556 // If this is a long or FP comparison that has been folded into
2557 // the HCondition, generate the comparison directly.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002558 DataType::Type type = condition->InputAt(0)->GetType();
2559 if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
Anton Kirilovfd522532017-05-10 12:46:57 +01002560 GenerateCompareTestAndBranch(condition, true_target, false_target, far_target);
Scott Wakelingfe885462016-09-22 10:24:38 +01002561 return;
2562 }
2563
Donghui Bai426b49c2016-11-08 14:55:38 +08002564 vixl32::Label* non_fallthrough_target;
2565 vixl32::Condition arm_cond = vixl32::Condition::None();
2566 const vixl32::Register left = InputRegisterAt(cond, 0);
2567 const Operand right = InputOperandAt(cond, 1);
2568
Scott Wakelingfe885462016-09-22 10:24:38 +01002569 if (true_target == nullptr) {
Donghui Bai426b49c2016-11-08 14:55:38 +08002570 arm_cond = ARMCondition(condition->GetOppositeCondition());
2571 non_fallthrough_target = false_target;
Scott Wakelingfe885462016-09-22 10:24:38 +01002572 } else {
Donghui Bai426b49c2016-11-08 14:55:38 +08002573 arm_cond = ARMCondition(condition->GetCondition());
2574 non_fallthrough_target = true_target;
2575 }
2576
2577 if (right.IsImmediate() && right.GetImmediate() == 0 && (arm_cond.Is(ne) || arm_cond.Is(eq))) {
2578 if (arm_cond.Is(eq)) {
Anton Kirilovfd522532017-05-10 12:46:57 +01002579 __ CompareAndBranchIfZero(left, non_fallthrough_target, far_target);
Donghui Bai426b49c2016-11-08 14:55:38 +08002580 } else {
2581 DCHECK(arm_cond.Is(ne));
Anton Kirilovfd522532017-05-10 12:46:57 +01002582 __ CompareAndBranchIfNonZero(left, non_fallthrough_target, far_target);
Donghui Bai426b49c2016-11-08 14:55:38 +08002583 }
2584 } else {
2585 __ Cmp(left, right);
Anton Kirilovfd522532017-05-10 12:46:57 +01002586 __ B(arm_cond, non_fallthrough_target, far_target);
Scott Wakelingfe885462016-09-22 10:24:38 +01002587 }
2588 }
2589
2590 // If neither branch falls through (case 3), the conditional branch to `true_target`
2591 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2592 if (true_target != nullptr && false_target != nullptr) {
2593 __ B(false_target);
2594 }
2595}
2596
2597void LocationsBuilderARMVIXL::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002598 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
Scott Wakelingfe885462016-09-22 10:24:38 +01002599 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
2600 locations->SetInAt(0, Location::RequiresRegister());
2601 }
2602}
2603
2604void InstructionCodeGeneratorARMVIXL::VisitIf(HIf* if_instr) {
2605 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2606 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002607 vixl32::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
2608 nullptr : codegen_->GetLabelOf(true_successor);
2609 vixl32::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
2610 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08002611 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Scott Wakelingfe885462016-09-22 10:24:38 +01002612}
2613
Scott Wakelingc34dba72016-10-03 10:14:44 +01002614void LocationsBuilderARMVIXL::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002615 LocationSummary* locations = new (GetGraph()->GetAllocator())
Scott Wakelingc34dba72016-10-03 10:14:44 +01002616 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01002617 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2618 RegisterSet caller_saves = RegisterSet::Empty();
2619 caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
2620 locations->SetCustomSlowPathCallerSaves(caller_saves);
Scott Wakelingc34dba72016-10-03 10:14:44 +01002621 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
2622 locations->SetInAt(0, Location::RequiresRegister());
2623 }
2624}
2625
2626void InstructionCodeGeneratorARMVIXL::VisitDeoptimize(HDeoptimize* deoptimize) {
2627 SlowPathCodeARMVIXL* slow_path =
2628 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARMVIXL>(deoptimize);
2629 GenerateTestAndBranch(deoptimize,
Andreas Gampe3db70682018-12-26 15:12:03 -08002630 /* condition_input_index= */ 0,
Scott Wakelingc34dba72016-10-03 10:14:44 +01002631 slow_path->GetEntryLabel(),
Andreas Gampe3db70682018-12-26 15:12:03 -08002632 /* false_target= */ nullptr);
Scott Wakelingc34dba72016-10-03 10:14:44 +01002633}
2634
Artem Serovd4cc5b22016-11-04 11:19:09 +00002635void LocationsBuilderARMVIXL::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002636 LocationSummary* locations = new (GetGraph()->GetAllocator())
Artem Serovd4cc5b22016-11-04 11:19:09 +00002637 LocationSummary(flag, LocationSummary::kNoCall);
2638 locations->SetOut(Location::RequiresRegister());
2639}
2640
2641void InstructionCodeGeneratorARMVIXL::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
2642 GetAssembler()->LoadFromOffset(kLoadWord,
2643 OutputRegister(flag),
2644 sp,
2645 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
2646}
2647
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002648void LocationsBuilderARMVIXL::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002649 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002650 const bool is_floating_point = DataType::IsFloatingPointType(select->GetType());
Donghui Bai426b49c2016-11-08 14:55:38 +08002651
2652 if (is_floating_point) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002653 locations->SetInAt(0, Location::RequiresFpuRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08002654 locations->SetInAt(1, Location::FpuRegisterOrConstant(select->GetTrueValue()));
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002655 } else {
2656 locations->SetInAt(0, Location::RequiresRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08002657 locations->SetInAt(1, Arm8BitEncodableConstantOrRegister(select->GetTrueValue()));
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002658 }
Donghui Bai426b49c2016-11-08 14:55:38 +08002659
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002660 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
Donghui Bai426b49c2016-11-08 14:55:38 +08002661 locations->SetInAt(2, Location::RegisterOrConstant(select->GetCondition()));
2662 // The code generator handles overlap with the values, but not with the condition.
2663 locations->SetOut(Location::SameAsFirstInput());
2664 } else if (is_floating_point) {
2665 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2666 } else {
2667 if (!locations->InAt(1).IsConstant()) {
2668 locations->SetInAt(0, Arm8BitEncodableConstantOrRegister(select->GetFalseValue()));
2669 }
2670
2671 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002672 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002673}
2674
2675void InstructionCodeGeneratorARMVIXL::VisitSelect(HSelect* select) {
Donghui Bai426b49c2016-11-08 14:55:38 +08002676 HInstruction* const condition = select->GetCondition();
2677 const LocationSummary* const locations = select->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002678 const DataType::Type type = select->GetType();
Donghui Bai426b49c2016-11-08 14:55:38 +08002679 const Location first = locations->InAt(0);
2680 const Location out = locations->Out();
2681 const Location second = locations->InAt(1);
Nicolas Geoffray7b05c5f2018-09-21 11:31:38 +01002682
2683 // In the unlucky case the output of this instruction overlaps
2684 // with an input of an "emitted-at-use-site" condition, and
2685 // the output of this instruction is not one of its inputs, we'll
2686  // need to fall back to branches instead of conditional ARM instructions.
2687 bool output_overlaps_with_condition_inputs =
2688 !IsBooleanValueOrMaterializedCondition(condition) &&
2689 !out.Equals(first) &&
2690 !out.Equals(second) &&
2691 (condition->GetLocations()->InAt(0).Equals(out) ||
2692 condition->GetLocations()->InAt(1).Equals(out));
2693 DCHECK(!output_overlaps_with_condition_inputs || condition->IsCondition());
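  // For example, if the condition `a == b` is emitted at its use site and the register
  // allocator assigns this select's output to the same register as `a`, the initial move
  // into `out` would clobber `a` before the comparison is generated, so the branch-based
  // fallback below is used instead.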
Donghui Bai426b49c2016-11-08 14:55:38 +08002694 Location src;
2695
2696 if (condition->IsIntConstant()) {
2697 if (condition->AsIntConstant()->IsFalse()) {
2698 src = first;
2699 } else {
2700 src = second;
2701 }
2702
2703 codegen_->MoveLocation(out, src, type);
2704 return;
2705 }
2706
Nicolas Geoffray7b05c5f2018-09-21 11:31:38 +01002707 if (!DataType::IsFloatingPointType(type) && !output_overlaps_with_condition_inputs) {
Donghui Bai426b49c2016-11-08 14:55:38 +08002708 bool invert = false;
2709
2710 if (out.Equals(second)) {
2711 src = first;
2712 invert = true;
2713 } else if (out.Equals(first)) {
2714 src = second;
2715 } else if (second.IsConstant()) {
2716 DCHECK(CanEncodeConstantAs8BitImmediate(second.GetConstant()));
2717 src = second;
2718 } else if (first.IsConstant()) {
2719 DCHECK(CanEncodeConstantAs8BitImmediate(first.GetConstant()));
2720 src = first;
2721 invert = true;
2722 } else {
2723 src = second;
2724 }
2725
2726 if (CanGenerateConditionalMove(out, src)) {
2727 if (!out.Equals(first) && !out.Equals(second)) {
2728 codegen_->MoveLocation(out, src.Equals(first) ? second : first, type);
2729 }
2730
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002731 std::pair<vixl32::Condition, vixl32::Condition> cond(eq, ne);
2732
2733 if (IsBooleanValueOrMaterializedCondition(condition)) {
2734 __ Cmp(InputRegisterAt(select, 2), 0);
2735 cond = invert ? std::make_pair(eq, ne) : std::make_pair(ne, eq);
2736 } else {
2737 cond = GenerateTest(condition->AsCondition(), invert, codegen_);
2738 }
2739
Donghui Bai426b49c2016-11-08 14:55:38 +08002740 const size_t instr_count = out.IsRegisterPair() ? 4 : 2;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002741 // We use the scope because of the IT block that follows.
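      // The exact-size scope also prevents the assembler from emitting anything else (such
      // as a literal pool) between the `it` and the 16-bit instructions it predicates, which
      // would otherwise break the IT block.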
Donghui Bai426b49c2016-11-08 14:55:38 +08002742 ExactAssemblyScope guard(GetVIXLAssembler(),
2743 instr_count * vixl32::k16BitT32InstructionSizeInBytes,
2744 CodeBufferCheckScope::kExactSize);
2745
2746 if (out.IsRegister()) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002747 __ it(cond.first);
2748 __ mov(cond.first, RegisterFrom(out), OperandFrom(src, type));
Donghui Bai426b49c2016-11-08 14:55:38 +08002749 } else {
2750 DCHECK(out.IsRegisterPair());
2751
2752 Operand operand_high(0);
2753 Operand operand_low(0);
2754
2755 if (src.IsConstant()) {
2756 const int64_t value = Int64ConstantFrom(src);
2757
2758 operand_high = High32Bits(value);
2759 operand_low = Low32Bits(value);
2760 } else {
2761 DCHECK(src.IsRegisterPair());
2762 operand_high = HighRegisterFrom(src);
2763 operand_low = LowRegisterFrom(src);
2764 }
2765
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002766 __ it(cond.first);
2767 __ mov(cond.first, LowRegisterFrom(out), operand_low);
2768 __ it(cond.first);
2769 __ mov(cond.first, HighRegisterFrom(out), operand_high);
Donghui Bai426b49c2016-11-08 14:55:38 +08002770 }
2771
2772 return;
2773 }
2774 }
2775
2776 vixl32::Label* false_target = nullptr;
2777 vixl32::Label* true_target = nullptr;
2778 vixl32::Label select_end;
Nicolas Geoffray7b05c5f2018-09-21 11:31:38 +01002779 vixl32::Label other_case;
Donghui Bai426b49c2016-11-08 14:55:38 +08002780 vixl32::Label* const target = codegen_->GetFinalLabel(select, &select_end);
2781
2782 if (out.Equals(second)) {
2783 true_target = target;
2784 src = first;
2785 } else {
2786 false_target = target;
2787 src = second;
2788
2789 if (!out.Equals(first)) {
Nicolas Geoffray7b05c5f2018-09-21 11:31:38 +01002790 if (output_overlaps_with_condition_inputs) {
2791 false_target = &other_case;
2792 } else {
2793 codegen_->MoveLocation(out, first, type);
2794 }
Donghui Bai426b49c2016-11-08 14:55:38 +08002795 }
2796 }
2797
Andreas Gampe3db70682018-12-26 15:12:03 -08002798 GenerateTestAndBranch(select, 2, true_target, false_target, /* far_target= */ false);
Donghui Bai426b49c2016-11-08 14:55:38 +08002799 codegen_->MoveLocation(out, src, type);
Nicolas Geoffray7b05c5f2018-09-21 11:31:38 +01002800 if (output_overlaps_with_condition_inputs) {
2801 __ B(target);
2802 __ Bind(&other_case);
2803 codegen_->MoveLocation(out, first, type);
2804 }
Donghui Bai426b49c2016-11-08 14:55:38 +08002805
2806 if (select_end.IsReferenced()) {
2807 __ Bind(&select_end);
2808 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002809}
2810
Artem Serov551b28f2016-10-18 19:11:30 +01002811void LocationsBuilderARMVIXL::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002812 new (GetGraph()->GetAllocator()) LocationSummary(info);
Artem Serov551b28f2016-10-18 19:11:30 +01002813}
2814
2815void InstructionCodeGeneratorARMVIXL::VisitNativeDebugInfo(HNativeDebugInfo*) {
2816 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
2817}
2818
Scott Wakelingfe885462016-09-22 10:24:38 +01002819void CodeGeneratorARMVIXL::GenerateNop() {
2820 __ Nop();
2821}
2822
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002823// `temp` is an extra temporary register that is used for some conditions;
2824// callers may not specify it, in which case the method will use a scratch
2825// register instead.
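// In other words, it materializes `out` = (`in` <condition> 0) ? 1 : 0 using straight-line
// arithmetic rather than a compare-and-branch sequence.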
2826void CodeGeneratorARMVIXL::GenerateConditionWithZero(IfCondition condition,
2827 vixl32::Register out,
2828 vixl32::Register in,
2829 vixl32::Register temp) {
2830 switch (condition) {
2831 case kCondEQ:
2832 // x <= 0 iff x == 0 when the comparison is unsigned.
2833 case kCondBE:
2834 if (!temp.IsValid() || (out.IsLow() && !out.Is(in))) {
2835 temp = out;
2836 }
2837
2838 // Avoid 32-bit instructions if possible; note that `in` and `temp` must be
2839 // different as well.
2840 if (in.IsLow() && temp.IsLow() && !in.Is(temp)) {
2841 // temp = - in; only 0 sets the carry flag.
2842 __ Rsbs(temp, in, 0);
2843
2844 if (out.Is(in)) {
2845 std::swap(in, temp);
2846 }
2847
2848 // out = - in + in + carry = carry
2849 __ Adc(out, temp, in);
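        // For example: in = 5 -> temp = -5, carry clear, out = -5 + 5 + 0 = 0;
        //              in = 0 -> temp =  0, carry set,   out =  0 + 0 + 1 = 1.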
2850 } else {
2851 // If `in` is 0, then it has 32 leading zeros, and less than that otherwise.
2852 __ Clz(out, in);
2853 // Any number less than 32 logically shifted right by 5 bits results in 0;
2854 // the same operation on 32 yields 1.
2855 __ Lsr(out, out, 5);
2856 }
2857
2858 break;
2859 case kCondNE:
2860 // x > 0 iff x != 0 when the comparison is unsigned.
2861 case kCondA: {
2862 UseScratchRegisterScope temps(GetVIXLAssembler());
2863
2864 if (out.Is(in)) {
2865 if (!temp.IsValid() || in.Is(temp)) {
2866 temp = temps.Acquire();
2867 }
2868 } else if (!temp.IsValid() || !temp.IsLow()) {
2869 temp = out;
2870 }
2871
2872 // temp = in - 1; only 0 does not set the carry flag.
2873 __ Subs(temp, in, 1);
2874 // out = in + ~temp + carry = in + (-(in - 1) - 1) + carry = in - in + 1 - 1 + carry = carry
2875 __ Sbc(out, in, temp);
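      // For example: in = 7 -> temp = 6,  carry set,   out = 7 + ~6 + 1 = 1;
      //              in = 0 -> temp = -1, carry clear, out = 0 + ~(-1) + 0 = 0.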
2876 break;
2877 }
2878 case kCondGE:
2879 __ Mvn(out, in);
2880 in = out;
2881 FALLTHROUGH_INTENDED;
2882 case kCondLT:
2883 // We only care about the sign bit.
2884 __ Lsr(out, in, 31);
2885 break;
2886 case kCondAE:
2887 // Trivially true.
2888 __ Mov(out, 1);
2889 break;
2890 case kCondB:
2891 // Trivially false.
2892 __ Mov(out, 0);
2893 break;
2894 default:
2895 LOG(FATAL) << "Unexpected condition " << condition;
2896 UNREACHABLE();
2897 }
2898}
2899
Scott Wakelingfe885462016-09-22 10:24:38 +01002900void LocationsBuilderARMVIXL::HandleCondition(HCondition* cond) {
2901 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002902 new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
Nicolas Geoffray7b05c5f2018-09-21 11:31:38 +01002903 const DataType::Type type = cond->InputAt(0)->GetType();
2904 if (DataType::IsFloatingPointType(type)) {
2905 locations->SetInAt(0, Location::RequiresFpuRegister());
2906 locations->SetInAt(1, ArithmeticZeroOrFpuRegister(cond->InputAt(1)));
2907 } else {
2908 locations->SetInAt(0, Location::RequiresRegister());
2909 locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
2910 }
2911 if (!cond->IsEmittedAtUseSite()) {
2912 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Scott Wakelingfe885462016-09-22 10:24:38 +01002913 }
2914}
2915
2916void InstructionCodeGeneratorARMVIXL::HandleCondition(HCondition* cond) {
2917 if (cond->IsEmittedAtUseSite()) {
2918 return;
2919 }
2920
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002921 const DataType::Type type = cond->GetLeft()->GetType();
Scott Wakelingfe885462016-09-22 10:24:38 +01002922
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002923 if (DataType::IsFloatingPointType(type)) {
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002924 GenerateConditionGeneric(cond, codegen_);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002925 return;
Scott Wakelingfe885462016-09-22 10:24:38 +01002926 }
2927
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002928 DCHECK(DataType::IsIntegralType(type) || type == DataType::Type::kReference) << type;
Scott Wakelingfe885462016-09-22 10:24:38 +01002929
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002930 const IfCondition condition = cond->GetCondition();
Scott Wakelingfe885462016-09-22 10:24:38 +01002931
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002932  // A condition with only one boolean input, or with two boolean inputs compared by something
2933  // other than equality or inequality, results from transformations done by the instruction
2934  // simplifier and is handled as a regular condition with integral inputs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002935 if (type == DataType::Type::kBool &&
2936 cond->GetRight()->GetType() == DataType::Type::kBool &&
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002937 (condition == kCondEQ || condition == kCondNE)) {
2938 vixl32::Register left = InputRegisterAt(cond, 0);
2939 const vixl32::Register out = OutputRegister(cond);
2940 const Location right_loc = cond->GetLocations()->InAt(1);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002941
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002942 // The constant case is handled by the instruction simplifier.
2943 DCHECK(!right_loc.IsConstant());
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002944
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002945 vixl32::Register right = RegisterFrom(right_loc);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002946
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002947 // Avoid 32-bit instructions if possible.
2948 if (out.Is(right)) {
2949 std::swap(left, right);
2950 }
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002951
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002952 __ Eor(out, left, right);
2953
2954 if (condition == kCondEQ) {
2955 __ Eor(out, out, 1);
2956 }
2957
2958 return;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002959 }
Anton Kirilov6f644202017-02-27 18:29:45 +00002960
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002961 GenerateConditionIntegralOrNonPrimitive(cond, codegen_);
Scott Wakelingfe885462016-09-22 10:24:38 +01002962}
2963
2964void LocationsBuilderARMVIXL::VisitEqual(HEqual* comp) {
2965 HandleCondition(comp);
2966}
2967
2968void InstructionCodeGeneratorARMVIXL::VisitEqual(HEqual* comp) {
2969 HandleCondition(comp);
2970}
2971
2972void LocationsBuilderARMVIXL::VisitNotEqual(HNotEqual* comp) {
2973 HandleCondition(comp);
2974}
2975
2976void InstructionCodeGeneratorARMVIXL::VisitNotEqual(HNotEqual* comp) {
2977 HandleCondition(comp);
2978}
2979
2980void LocationsBuilderARMVIXL::VisitLessThan(HLessThan* comp) {
2981 HandleCondition(comp);
2982}
2983
2984void InstructionCodeGeneratorARMVIXL::VisitLessThan(HLessThan* comp) {
2985 HandleCondition(comp);
2986}
2987
2988void LocationsBuilderARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
2989 HandleCondition(comp);
2990}
2991
2992void InstructionCodeGeneratorARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
2993 HandleCondition(comp);
2994}
2995
2996void LocationsBuilderARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
2997 HandleCondition(comp);
2998}
2999
3000void InstructionCodeGeneratorARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
3001 HandleCondition(comp);
3002}
3003
3004void LocationsBuilderARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
3005 HandleCondition(comp);
3006}
3007
3008void InstructionCodeGeneratorARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
3009 HandleCondition(comp);
3010}
3011
3012void LocationsBuilderARMVIXL::VisitBelow(HBelow* comp) {
3013 HandleCondition(comp);
3014}
3015
3016void InstructionCodeGeneratorARMVIXL::VisitBelow(HBelow* comp) {
3017 HandleCondition(comp);
3018}
3019
3020void LocationsBuilderARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
3021 HandleCondition(comp);
3022}
3023
3024void InstructionCodeGeneratorARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
3025 HandleCondition(comp);
3026}
3027
3028void LocationsBuilderARMVIXL::VisitAbove(HAbove* comp) {
3029 HandleCondition(comp);
3030}
3031
3032void InstructionCodeGeneratorARMVIXL::VisitAbove(HAbove* comp) {
3033 HandleCondition(comp);
3034}
3035
3036void LocationsBuilderARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
3037 HandleCondition(comp);
3038}
3039
3040void InstructionCodeGeneratorARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
3041 HandleCondition(comp);
3042}
3043
3044void LocationsBuilderARMVIXL::VisitIntConstant(HIntConstant* constant) {
3045 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003046 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Scott Wakelingfe885462016-09-22 10:24:38 +01003047 locations->SetOut(Location::ConstantLocation(constant));
3048}
3049
3050void InstructionCodeGeneratorARMVIXL::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
3051 // Will be generated at use site.
3052}
3053
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003054void LocationsBuilderARMVIXL::VisitNullConstant(HNullConstant* constant) {
3055 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003056 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003057 locations->SetOut(Location::ConstantLocation(constant));
3058}
3059
3060void InstructionCodeGeneratorARMVIXL::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
3061 // Will be generated at use site.
3062}
3063
Scott Wakelingfe885462016-09-22 10:24:38 +01003064void LocationsBuilderARMVIXL::VisitLongConstant(HLongConstant* constant) {
3065 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003066 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Scott Wakelingfe885462016-09-22 10:24:38 +01003067 locations->SetOut(Location::ConstantLocation(constant));
3068}
3069
3070void InstructionCodeGeneratorARMVIXL::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
3071 // Will be generated at use site.
3072}
3073
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01003074void LocationsBuilderARMVIXL::VisitFloatConstant(HFloatConstant* constant) {
3075 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003076 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01003077 locations->SetOut(Location::ConstantLocation(constant));
3078}
3079
Scott Wakelingc34dba72016-10-03 10:14:44 +01003080void InstructionCodeGeneratorARMVIXL::VisitFloatConstant(
3081 HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01003082 // Will be generated at use site.
3083}
3084
3085void LocationsBuilderARMVIXL::VisitDoubleConstant(HDoubleConstant* constant) {
3086 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003087 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01003088 locations->SetOut(Location::ConstantLocation(constant));
3089}
3090
Scott Wakelingc34dba72016-10-03 10:14:44 +01003091void InstructionCodeGeneratorARMVIXL::VisitDoubleConstant(
3092 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01003093 // Will be generated at use site.
3094}
3095
Igor Murashkind01745e2017-04-05 16:40:31 -07003096void LocationsBuilderARMVIXL::VisitConstructorFence(HConstructorFence* constructor_fence) {
3097 constructor_fence->SetLocations(nullptr);
3098}
3099
3100void InstructionCodeGeneratorARMVIXL::VisitConstructorFence(
3101 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
3102 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
3103}
3104
Scott Wakelingfe885462016-09-22 10:24:38 +01003105void LocationsBuilderARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
3106 memory_barrier->SetLocations(nullptr);
3107}
3108
3109void InstructionCodeGeneratorARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
3110 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
3111}
3112
3113void LocationsBuilderARMVIXL::VisitReturnVoid(HReturnVoid* ret) {
3114 ret->SetLocations(nullptr);
3115}
3116
3117void InstructionCodeGeneratorARMVIXL::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
3118 codegen_->GenerateFrameExit();
3119}
3120
3121void LocationsBuilderARMVIXL::VisitReturn(HReturn* ret) {
3122 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003123 new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
Scott Wakelingfe885462016-09-22 10:24:38 +01003124 locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
3125}
3126
3127void InstructionCodeGeneratorARMVIXL::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
3128 codegen_->GenerateFrameExit();
3129}
3130
Artem Serovcfbe9132016-10-14 15:58:56 +01003131void LocationsBuilderARMVIXL::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3132  // The trampoline uses the standard dex calling convention, except that instead of
3133  // loading arg0/r0 with the target Method*, arg0/r0 will contain
3134  // the method_idx.
3135 HandleInvoke(invoke);
3136}
3137
3138void InstructionCodeGeneratorARMVIXL::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3139 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
Andreas Gampe3db70682018-12-26 15:12:03 -08003140 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 3);
Artem Serovcfbe9132016-10-14 15:58:56 +01003141}
3142
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003143void LocationsBuilderARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
3144 // Explicit clinit checks triggered by static invokes must have been pruned by
3145 // art::PrepareForRegisterAllocation.
3146 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
3147
Anton Kirilov5ec62182016-10-13 20:16:02 +01003148 IntrinsicLocationsBuilderARMVIXL intrinsic(codegen_);
3149 if (intrinsic.TryDispatch(invoke)) {
Anton Kirilov5ec62182016-10-13 20:16:02 +01003150 return;
3151 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003152
3153 HandleInvoke(invoke);
3154}
3155
Anton Kirilov5ec62182016-10-13 20:16:02 +01003156static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARMVIXL* codegen) {
3157 if (invoke->GetLocations()->Intrinsified()) {
3158 IntrinsicCodeGeneratorARMVIXL intrinsic(codegen);
3159 intrinsic.Dispatch(invoke);
3160 return true;
3161 }
3162 return false;
3163}
3164
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003165void InstructionCodeGeneratorARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
3166 // Explicit clinit checks triggered by static invokes must have been pruned by
3167 // art::PrepareForRegisterAllocation.
3168 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
3169
Anton Kirilov5ec62182016-10-13 20:16:02 +01003170 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Andreas Gampe3db70682018-12-26 15:12:03 -08003171 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 4);
Anton Kirilov5ec62182016-10-13 20:16:02 +01003172 return;
3173 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003174
3175 LocationSummary* locations = invoke->GetLocations();
Artem Serovd4cc5b22016-11-04 11:19:09 +00003176 codegen_->GenerateStaticOrDirectCall(
3177 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Roland Levillain5daa4952017-07-03 17:23:56 +01003178
Andreas Gampe3db70682018-12-26 15:12:03 -08003179 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 5);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003180}
3181
3182void LocationsBuilderARMVIXL::HandleInvoke(HInvoke* invoke) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00003183 InvokeDexCallingConventionVisitorARMVIXL calling_convention_visitor;
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003184 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
3185}
3186
3187void LocationsBuilderARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Anton Kirilov5ec62182016-10-13 20:16:02 +01003188 IntrinsicLocationsBuilderARMVIXL intrinsic(codegen_);
3189 if (intrinsic.TryDispatch(invoke)) {
3190 return;
3191 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003192
3193 HandleInvoke(invoke);
3194}
3195
3196void InstructionCodeGeneratorARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Anton Kirilov5ec62182016-10-13 20:16:02 +01003197 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Andreas Gampe3db70682018-12-26 15:12:03 -08003198 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 6);
Anton Kirilov5ec62182016-10-13 20:16:02 +01003199 return;
3200 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003201
3202 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames374ddf32016-11-04 10:40:49 +00003203 DCHECK(!codegen_->IsLeafMethod());
Roland Levillain5daa4952017-07-03 17:23:56 +01003204
Andreas Gampe3db70682018-12-26 15:12:03 -08003205 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 7);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003206}
3207
Artem Serovcfbe9132016-10-14 15:58:56 +01003208void LocationsBuilderARMVIXL::VisitInvokeInterface(HInvokeInterface* invoke) {
3209 HandleInvoke(invoke);
3210 // Add the hidden argument.
3211 invoke->GetLocations()->AddTemp(LocationFrom(r12));
3212}
3213
3214void InstructionCodeGeneratorARMVIXL::VisitInvokeInterface(HInvokeInterface* invoke) {
3215 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
3216 LocationSummary* locations = invoke->GetLocations();
3217 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
3218 vixl32::Register hidden_reg = RegisterFrom(locations->GetTemp(1));
3219 Location receiver = locations->InAt(0);
3220 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3221
3222 DCHECK(!receiver.IsStackSlot());
3223
Alexandre Rames374ddf32016-11-04 10:40:49 +00003224 // Ensure the pc position is recorded immediately after the `ldr` instruction.
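  // If the receiver is null, that `ldr` faults and the implicit null check machinery maps
  // the faulting pc back to this instruction, so nothing may be emitted between the load
  // and the RecordPcInfo call below.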
3225 {
Artem Serov0fb37192016-12-06 18:13:40 +00003226 ExactAssemblyScope aas(GetVIXLAssembler(),
3227 vixl32::kMaxInstructionSizeInBytes,
3228 CodeBufferCheckScope::kMaximumSize);
Alexandre Rames374ddf32016-11-04 10:40:49 +00003229 // /* HeapReference<Class> */ temp = receiver->klass_
3230 __ ldr(temp, MemOperand(RegisterFrom(receiver), class_offset));
3231 codegen_->MaybeRecordImplicitNullCheck(invoke);
3232 }
Artem Serovcfbe9132016-10-14 15:58:56 +01003233 // Instead of simply (possibly) unpoisoning `temp` here, we should
3234 // emit a read barrier for the previous class reference load.
3235 // However this is not required in practice, as this is an
3236 // intermediate/temporary reference and because the current
3237 // concurrent copying collector keeps the from-space memory
3238 // intact/accessible until the end of the marking phase (the
3239  // concurrent copying collector may not do so in the future).
3240 GetAssembler()->MaybeUnpoisonHeapReference(temp);
3241 GetAssembler()->LoadFromOffset(kLoadWord,
3242 temp,
3243 temp,
3244 mirror::Class::ImtPtrOffset(kArmPointerSize).Uint32Value());
3245 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
3246 invoke->GetImtIndex(), kArmPointerSize));
3247 // temp = temp->GetImtEntryAt(method_offset);
3248 GetAssembler()->LoadFromOffset(kLoadWord, temp, temp, method_offset);
3249 uint32_t entry_point =
3250 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value();
3251 // LR = temp->GetEntryPoint();
3252 GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, entry_point);
3253
3254 // Set the hidden (in r12) argument. It is done here, right before a BLX to prevent other
3255  // instructions from clobbering it, as they might use r12 as a scratch register.
3256 DCHECK(hidden_reg.Is(r12));
Scott Wakelingb77051e2016-11-21 19:46:00 +00003257
3258 {
3259 // The VIXL macro assembler may clobber any of the scratch registers that are available to it,
3260 // so it checks if the application is using them (by passing them to the macro assembler
3261 // methods). The following application of UseScratchRegisterScope corrects VIXL's notion of
3262 // what is available, and is the opposite of the standard usage: Instead of requesting a
3263 // temporary location, it imposes an external constraint (i.e. a specific register is reserved
3264 // for the hidden argument). Note that this works even if VIXL needs a scratch register itself
3265 // (to materialize the constant), since the destination register becomes available for such use
3266 // internally for the duration of the macro instruction.
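    // Concretely: `hidden_reg` is removed from the scratch pool so the Mov below cannot
    // silently allocate it, and it becomes available to VIXL again once `temps` goes out
    // of scope.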
3267 UseScratchRegisterScope temps(GetVIXLAssembler());
3268 temps.Exclude(hidden_reg);
3269 __ Mov(hidden_reg, invoke->GetDexMethodIndex());
3270 }
Artem Serovcfbe9132016-10-14 15:58:56 +01003271 {
Alexandre Rames374ddf32016-11-04 10:40:49 +00003272 // Ensure the pc position is recorded immediately after the `blx` instruction.
3273 // blx in T32 has only 16bit encoding that's why a stricter check for the scope is used.
Artem Serov0fb37192016-12-06 18:13:40 +00003274 ExactAssemblyScope aas(GetVIXLAssembler(),
Alexandre Rames374ddf32016-11-04 10:40:49 +00003275 vixl32::k16BitT32InstructionSizeInBytes,
3276 CodeBufferCheckScope::kExactSize);
Artem Serovcfbe9132016-10-14 15:58:56 +01003277    // LR(); i.e. call the entry point that was just loaded into LR.
3278 __ blx(lr);
Artem Serovcfbe9132016-10-14 15:58:56 +01003279 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames374ddf32016-11-04 10:40:49 +00003280 DCHECK(!codegen_->IsLeafMethod());
Artem Serovcfbe9132016-10-14 15:58:56 +01003281 }
Roland Levillain5daa4952017-07-03 17:23:56 +01003282
Andreas Gampe3db70682018-12-26 15:12:03 -08003283 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 8);
Artem Serovcfbe9132016-10-14 15:58:56 +01003284}
3285
Orion Hodsonac141392017-01-13 11:53:47 +00003286void LocationsBuilderARMVIXL::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
3287 HandleInvoke(invoke);
3288}
3289
3290void InstructionCodeGeneratorARMVIXL::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
3291 codegen_->GenerateInvokePolymorphicCall(invoke);
Andreas Gampe3db70682018-12-26 15:12:03 -08003292 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 9);
Orion Hodsonac141392017-01-13 11:53:47 +00003293}
3294
Orion Hodson4c8e12e2018-05-18 08:33:20 +01003295void LocationsBuilderARMVIXL::VisitInvokeCustom(HInvokeCustom* invoke) {
3296 HandleInvoke(invoke);
3297}
3298
3299void InstructionCodeGeneratorARMVIXL::VisitInvokeCustom(HInvokeCustom* invoke) {
3300 codegen_->GenerateInvokeCustomCall(invoke);
Andreas Gampe3db70682018-12-26 15:12:03 -08003301 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 10);
Orion Hodson4c8e12e2018-05-18 08:33:20 +01003302}
3303
Artem Serov02109dd2016-09-23 17:17:54 +01003304void LocationsBuilderARMVIXL::VisitNeg(HNeg* neg) {
3305 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003306 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Artem Serov02109dd2016-09-23 17:17:54 +01003307 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003308 case DataType::Type::kInt32: {
Artem Serov02109dd2016-09-23 17:17:54 +01003309 locations->SetInAt(0, Location::RequiresRegister());
3310 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3311 break;
3312 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003313 case DataType::Type::kInt64: {
Artem Serov02109dd2016-09-23 17:17:54 +01003314 locations->SetInAt(0, Location::RequiresRegister());
3315 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3316 break;
3317 }
3318
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003319 case DataType::Type::kFloat32:
3320 case DataType::Type::kFloat64:
Artem Serov02109dd2016-09-23 17:17:54 +01003321 locations->SetInAt(0, Location::RequiresFpuRegister());
3322 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3323 break;
3324
3325 default:
3326 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3327 }
3328}
3329
3330void InstructionCodeGeneratorARMVIXL::VisitNeg(HNeg* neg) {
3331 LocationSummary* locations = neg->GetLocations();
3332 Location out = locations->Out();
3333 Location in = locations->InAt(0);
3334 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003335 case DataType::Type::kInt32:
Artem Serov02109dd2016-09-23 17:17:54 +01003336 __ Rsb(OutputRegister(neg), InputRegisterAt(neg, 0), 0);
3337 break;
3338
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003339 case DataType::Type::kInt64:
Artem Serov02109dd2016-09-23 17:17:54 +01003340 // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
3341 __ Rsbs(LowRegisterFrom(out), LowRegisterFrom(in), 0);
3342 // We cannot emit an RSC (Reverse Subtract with Carry)
3343 // instruction here, as it does not exist in the Thumb-2
3344 // instruction set. We use the following approach
3345  // with SBC and SUB instead.
3346 //
3347 // out.hi = -C
3348 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(out));
3349 // out.hi = out.hi - in.hi
3350 __ Sub(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(in));
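      // Overall: -(hi:lo) == (-hi - borrow):(-lo), where borrow = (in.lo != 0); the SBC of a
      // register with itself materializes -borrow (0 or -1) in place of the missing RSC.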
3351 break;
3352
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003353 case DataType::Type::kFloat32:
3354 case DataType::Type::kFloat64:
Anton Kirilov644032c2016-12-06 17:51:43 +00003355 __ Vneg(OutputVRegister(neg), InputVRegister(neg));
Artem Serov02109dd2016-09-23 17:17:54 +01003356 break;
3357
3358 default:
3359 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3360 }
3361}
3362
Scott Wakelingfe885462016-09-22 10:24:38 +01003363void LocationsBuilderARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003364 DataType::Type result_type = conversion->GetResultType();
3365 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003366 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
3367 << input_type << " -> " << result_type;
Scott Wakelingfe885462016-09-22 10:24:38 +01003368
3369 // The float-to-long, double-to-long and long-to-float type conversions
3370 // rely on a call to the runtime.
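  // (AArch32 VFP vcvt only converts between floating point and 32-bit integers, so these
  // 64-bit cases cannot be handled with a single instruction; long-to-double, by contrast,
  // is synthesized inline, which is why that case below stays kNoCall and just requests
  // two FP temporaries.)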
3371 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003372 (((input_type == DataType::Type::kFloat32 || input_type == DataType::Type::kFloat64)
3373 && result_type == DataType::Type::kInt64)
3374 || (input_type == DataType::Type::kInt64 && result_type == DataType::Type::kFloat32))
Scott Wakelingfe885462016-09-22 10:24:38 +01003375 ? LocationSummary::kCallOnMainOnly
3376 : LocationSummary::kNoCall;
3377 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003378 new (GetGraph()->GetAllocator()) LocationSummary(conversion, call_kind);
Scott Wakelingfe885462016-09-22 10:24:38 +01003379
Scott Wakelingfe885462016-09-22 10:24:38 +01003380 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003381 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003382 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003383 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003384 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003385 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
3386 locations->SetInAt(0, Location::RequiresRegister());
3387 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Scott Wakelingfe885462016-09-22 10:24:38 +01003388 break;
3389
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003390 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003391 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003392 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003393 locations->SetInAt(0, Location::Any());
3394 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3395 break;
3396
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003397 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003398 locations->SetInAt(0, Location::RequiresFpuRegister());
3399 locations->SetOut(Location::RequiresRegister());
3400 locations->AddTemp(Location::RequiresFpuRegister());
3401 break;
3402
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003403 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003404 locations->SetInAt(0, Location::RequiresFpuRegister());
3405 locations->SetOut(Location::RequiresRegister());
3406 locations->AddTemp(Location::RequiresFpuRegister());
3407 break;
3408
3409 default:
3410 LOG(FATAL) << "Unexpected type conversion from " << input_type
3411 << " to " << result_type;
3412 }
3413 break;
3414
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003415 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003416 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003417 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003418 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003419 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003420 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003421 case DataType::Type::kInt16:
3422 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003423 locations->SetInAt(0, Location::RequiresRegister());
3424 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3425 break;
3426
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003427 case DataType::Type::kFloat32: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003428 InvokeRuntimeCallingConventionARMVIXL calling_convention;
3429 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
3430 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01003431 break;
3432 }
3433
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003434 case DataType::Type::kFloat64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003435 InvokeRuntimeCallingConventionARMVIXL calling_convention;
3436 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0),
3437 calling_convention.GetFpuRegisterAt(1)));
3438 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01003439 break;
3440 }
3441
3442 default:
3443 LOG(FATAL) << "Unexpected type conversion from " << input_type
3444 << " to " << result_type;
3445 }
3446 break;
3447
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003448 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003449 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003450 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003451 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003452 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003453 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003454 case DataType::Type::kInt16:
3455 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003456 locations->SetInAt(0, Location::RequiresRegister());
3457 locations->SetOut(Location::RequiresFpuRegister());
3458 break;
3459
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003460 case DataType::Type::kInt64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003461 InvokeRuntimeCallingConventionARMVIXL calling_convention;
3462 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0),
3463 calling_convention.GetRegisterAt(1)));
3464 locations->SetOut(LocationFrom(calling_convention.GetFpuRegisterAt(0)));
Scott Wakelingfe885462016-09-22 10:24:38 +01003465 break;
3466 }
3467
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003468 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003469 locations->SetInAt(0, Location::RequiresFpuRegister());
3470 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3471 break;
3472
3473 default:
3474 LOG(FATAL) << "Unexpected type conversion from " << input_type
3475 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003476 }
Scott Wakelingfe885462016-09-22 10:24:38 +01003477 break;
3478
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003479 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003480 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003481 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003482 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003483 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003484 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003485 case DataType::Type::kInt16:
3486 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003487 locations->SetInAt(0, Location::RequiresRegister());
3488 locations->SetOut(Location::RequiresFpuRegister());
3489 break;
3490
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003491 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003492 locations->SetInAt(0, Location::RequiresRegister());
3493 locations->SetOut(Location::RequiresFpuRegister());
3494 locations->AddTemp(Location::RequiresFpuRegister());
3495 locations->AddTemp(Location::RequiresFpuRegister());
3496 break;
3497
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003498 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003499 locations->SetInAt(0, Location::RequiresFpuRegister());
3500 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3501 break;
3502
3503 default:
3504 LOG(FATAL) << "Unexpected type conversion from " << input_type
3505 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003506 }
Scott Wakelingfe885462016-09-22 10:24:38 +01003507 break;
3508
3509 default:
3510 LOG(FATAL) << "Unexpected type conversion from " << input_type
3511 << " to " << result_type;
3512 }
3513}
3514
3515void InstructionCodeGeneratorARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
3516 LocationSummary* locations = conversion->GetLocations();
3517 Location out = locations->Out();
3518 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003519 DataType::Type result_type = conversion->GetResultType();
3520 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003521 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
3522 << input_type << " -> " << result_type;
Scott Wakelingfe885462016-09-22 10:24:38 +01003523 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003524 case DataType::Type::kUint8:
Scott Wakelingfe885462016-09-22 10:24:38 +01003525 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003526 case DataType::Type::kInt8:
3527 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003528 case DataType::Type::kInt16:
3529 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003530 __ Ubfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 8);
3531 break;
3532 case DataType::Type::kInt64:
3533 __ Ubfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 8);
3534 break;
3535
3536 default:
3537 LOG(FATAL) << "Unexpected type conversion from " << input_type
3538 << " to " << result_type;
3539 }
3540 break;
3541
3542 case DataType::Type::kInt8:
3543 switch (input_type) {
3544 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003545 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003546 case DataType::Type::kInt16:
3547 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003548 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 8);
3549 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003550 case DataType::Type::kInt64:
3551 __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 8);
3552 break;
3553
3554 default:
3555 LOG(FATAL) << "Unexpected type conversion from " << input_type
3556 << " to " << result_type;
3557 }
3558 break;
3559
3560 case DataType::Type::kUint16:
3561 switch (input_type) {
3562 case DataType::Type::kInt8:
3563 case DataType::Type::kInt16:
3564 case DataType::Type::kInt32:
3565 __ Ubfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
3566 break;
3567 case DataType::Type::kInt64:
3568 __ Ubfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
3569 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01003570
3571 default:
3572 LOG(FATAL) << "Unexpected type conversion from " << input_type
3573 << " to " << result_type;
3574 }
3575 break;
3576
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003577 case DataType::Type::kInt16:
Scott Wakelingfe885462016-09-22 10:24:38 +01003578 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003579 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003580 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003581 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
3582 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003583 case DataType::Type::kInt64:
3584 __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
3585 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01003586
3587 default:
3588 LOG(FATAL) << "Unexpected type conversion from " << input_type
3589 << " to " << result_type;
3590 }
3591 break;
3592
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003593 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003594 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003595 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003596 DCHECK(out.IsRegister());
3597 if (in.IsRegisterPair()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003598 __ Mov(OutputRegister(conversion), LowRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01003599 } else if (in.IsDoubleStackSlot()) {
3600 GetAssembler()->LoadFromOffset(kLoadWord,
3601 OutputRegister(conversion),
3602 sp,
3603 in.GetStackIndex());
3604 } else {
3605 DCHECK(in.IsConstant());
3606 DCHECK(in.GetConstant()->IsLongConstant());
Vladimir Markoba1a48e2017-04-13 11:50:14 +01003607 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
3608 __ Mov(OutputRegister(conversion), static_cast<int32_t>(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01003609 }
3610 break;
3611
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003612 case DataType::Type::kFloat32: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003613 vixl32::SRegister temp = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003614 __ Vcvt(S32, F32, temp, InputSRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01003615 __ Vmov(OutputRegister(conversion), temp);
3616 break;
3617 }
3618
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003619 case DataType::Type::kFloat64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003620 vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003621 __ Vcvt(S32, F64, temp_s, DRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01003622 __ Vmov(OutputRegister(conversion), temp_s);
3623 break;
3624 }
3625
3626 default:
3627 LOG(FATAL) << "Unexpected type conversion from " << input_type
3628 << " to " << result_type;
3629 }
3630 break;
3631
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003632 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003633 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003634 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003635 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003636 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003637 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003638 case DataType::Type::kInt16:
3639 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003640 DCHECK(out.IsRegisterPair());
3641 DCHECK(in.IsRegister());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003642 __ Mov(LowRegisterFrom(out), InputRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01003643 // Sign extension.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003644 __ Asr(HighRegisterFrom(out), LowRegisterFrom(out), 31);
Scott Wakelingfe885462016-09-22 10:24:38 +01003645 break;
3646
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003647 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003648 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
3649 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
3650 break;
3651
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003652 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003653 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
3654 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
3655 break;
3656
3657 default:
3658 LOG(FATAL) << "Unexpected type conversion from " << input_type
3659 << " to " << result_type;
3660 }
3661 break;
3662
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003663 case DataType::Type::kFloat32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003664 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003665 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003666 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003667 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003668 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003669 case DataType::Type::kInt16:
3670 case DataType::Type::kInt32:
Scott Wakelingfe885462016-09-22 10:24:38 +01003671 __ Vmov(OutputSRegister(conversion), InputRegisterAt(conversion, 0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003672 __ Vcvt(F32, S32, OutputSRegister(conversion), OutputSRegister(conversion));
Scott Wakelingfe885462016-09-22 10:24:38 +01003673 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01003674
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003675 case DataType::Type::kInt64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003676 codegen_->InvokeRuntime(kQuickL2f, conversion, conversion->GetDexPc());
3677 CheckEntrypointTypes<kQuickL2f, float, int64_t>();
3678 break;
3679
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003680 case DataType::Type::kFloat64:
Scott Wakelingc34dba72016-10-03 10:14:44 +01003681 __ Vcvt(F32, F64, OutputSRegister(conversion), DRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01003682 break;
3683
3684 default:
3685 LOG(FATAL) << "Unexpected type conversion from " << input_type
3686 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003687 }
Scott Wakelingfe885462016-09-22 10:24:38 +01003688 break;
3689
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003690 case DataType::Type::kFloat64:
Scott Wakelingfe885462016-09-22 10:24:38 +01003691 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003692 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003693 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003694 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003695 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003696 case DataType::Type::kInt16:
3697 case DataType::Type::kInt32:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003698 __ Vmov(LowSRegisterFrom(out), InputRegisterAt(conversion, 0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003699 __ Vcvt(F64, S32, DRegisterFrom(out), LowSRegisterFrom(out));
Scott Wakelingfe885462016-09-22 10:24:38 +01003700 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01003701
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003702 case DataType::Type::kInt64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003703 vixl32::Register low = LowRegisterFrom(in);
3704 vixl32::Register high = HighRegisterFrom(in);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003705 vixl32::SRegister out_s = LowSRegisterFrom(out);
Scott Wakelingc34dba72016-10-03 10:14:44 +01003706 vixl32::DRegister out_d = DRegisterFrom(out);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003707 vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingc34dba72016-10-03 10:14:44 +01003708 vixl32::DRegister temp_d = DRegisterFrom(locations->GetTemp(0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003709 vixl32::DRegister constant_d = DRegisterFrom(locations->GetTemp(1));
Scott Wakelingfe885462016-09-22 10:24:38 +01003710
3711 // temp_d = int-to-double(high)
3712 __ Vmov(temp_s, high);
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01003713 __ Vcvt(F64, S32, temp_d, temp_s);
Scott Wakelingfe885462016-09-22 10:24:38 +01003714 // constant_d = k2Pow32EncodingForDouble
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003715 __ Vmov(constant_d, bit_cast<double, int64_t>(k2Pow32EncodingForDouble));
Scott Wakelingfe885462016-09-22 10:24:38 +01003716 // out_d = unsigned-to-double(low)
3717 __ Vmov(out_s, low);
3718 __ Vcvt(F64, U32, out_d, out_s);
3719 // out_d += temp_d * constant_d
3720 __ Vmla(F64, out_d, temp_d, constant_d);
3721 break;
3722 }
3723
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003724 case DataType::Type::kFloat32:
Scott Wakelingc34dba72016-10-03 10:14:44 +01003725 __ Vcvt(F64, F32, DRegisterFrom(out), InputSRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01003726 break;
3727
3728 default:
3729 LOG(FATAL) << "Unexpected type conversion from " << input_type
3730 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003731 }
Scott Wakelingfe885462016-09-22 10:24:38 +01003732 break;
3733
3734 default:
3735 LOG(FATAL) << "Unexpected type conversion from " << input_type
3736 << " to " << result_type;
3737 }
3738}
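
// Illustrative sketch, not part of the original file (the helper name and
// local constant below are hypothetical): the kInt64 -> kFloat64 sequence
// above (Vmov/Vcvt/Vmla) reconstructs the value without a runtime call as
// double(high) * 2^32 + double(unsigned(low)). Both partial conversions are
// exact, so only the final accumulate rounds.
static double Int64ToDoubleSketch(int64_t value) {
  int32_t high = static_cast<int32_t>(value >> 32);   // signed upper half
  uint32_t low = static_cast<uint32_t>(value);        // unsigned lower half
  const double k2Pow32 = 4294967296.0;                // the constant_d loaded above
  return static_cast<double>(high) * k2Pow32 + static_cast<double>(low);
}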
3739
3740void LocationsBuilderARMVIXL::VisitAdd(HAdd* add) {
3741 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003742 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Scott Wakelingfe885462016-09-22 10:24:38 +01003743 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003744 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003745 locations->SetInAt(0, Location::RequiresRegister());
3746 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3747 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3748 break;
3749 }
3750
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003751 case DataType::Type::kInt64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003752 locations->SetInAt(0, Location::RequiresRegister());
Anton Kirilovdda43962016-11-21 19:55:20 +00003753 locations->SetInAt(1, ArmEncodableConstantOrRegister(add->InputAt(1), ADD));
Scott Wakelingfe885462016-09-22 10:24:38 +01003754 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3755 break;
3756 }
3757
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003758 case DataType::Type::kFloat32:
3759 case DataType::Type::kFloat64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003760 locations->SetInAt(0, Location::RequiresFpuRegister());
3761 locations->SetInAt(1, Location::RequiresFpuRegister());
3762 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3763 break;
3764 }
3765
3766 default:
3767 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
3768 }
3769}
3770
3771void InstructionCodeGeneratorARMVIXL::VisitAdd(HAdd* add) {
3772 LocationSummary* locations = add->GetLocations();
3773 Location out = locations->Out();
3774 Location first = locations->InAt(0);
3775 Location second = locations->InAt(1);
3776
3777 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003778 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003779 __ Add(OutputRegister(add), InputRegisterAt(add, 0), InputOperandAt(add, 1));
3780 }
3781 break;
3782
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003783 case DataType::Type::kInt64: {
Anton Kirilovdda43962016-11-21 19:55:20 +00003784 if (second.IsConstant()) {
3785 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
3786 GenerateAddLongConst(out, first, value);
3787 } else {
3788 DCHECK(second.IsRegisterPair());
3789 __ Adds(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
3790 __ Adc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
3791 }
Scott Wakelingfe885462016-09-22 10:24:38 +01003792 break;
3793 }
3794
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003795 case DataType::Type::kFloat32:
3796 case DataType::Type::kFloat64:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003797 __ Vadd(OutputVRegister(add), InputVRegisterAt(add, 0), InputVRegisterAt(add, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01003798 break;
3799
3800 default:
3801 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
3802 }
3803}
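
// Illustrative sketch, not part of the original file (helper name is
// hypothetical): the kInt64 add above pairs ADDS/ADC so that the carry out of
// the low-word addition propagates into the high-word addition.
static void Add64Sketch(uint32_t a_lo, uint32_t a_hi,
                        uint32_t b_lo, uint32_t b_hi,
                        uint32_t* out_lo, uint32_t* out_hi) {
  uint32_t lo = a_lo + b_lo;               // ADDS: sets the carry flag
  uint32_t carry = (lo < a_lo) ? 1u : 0u;  // carry out of the low word
  *out_lo = lo;
  *out_hi = a_hi + b_hi + carry;           // ADC: folds the carry back in
}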
3804
3805void LocationsBuilderARMVIXL::VisitSub(HSub* sub) {
3806 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003807 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Scott Wakelingfe885462016-09-22 10:24:38 +01003808 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003809 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003810 locations->SetInAt(0, Location::RequiresRegister());
3811 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
3812 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3813 break;
3814 }
3815
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003816 case DataType::Type::kInt64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003817 locations->SetInAt(0, Location::RequiresRegister());
Anton Kirilovdda43962016-11-21 19:55:20 +00003818 locations->SetInAt(1, ArmEncodableConstantOrRegister(sub->InputAt(1), SUB));
Scott Wakelingfe885462016-09-22 10:24:38 +01003819 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3820 break;
3821 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003822 case DataType::Type::kFloat32:
3823 case DataType::Type::kFloat64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003824 locations->SetInAt(0, Location::RequiresFpuRegister());
3825 locations->SetInAt(1, Location::RequiresFpuRegister());
3826 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3827 break;
3828 }
3829 default:
3830 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
3831 }
3832}
3833
3834void InstructionCodeGeneratorARMVIXL::VisitSub(HSub* sub) {
3835 LocationSummary* locations = sub->GetLocations();
3836 Location out = locations->Out();
3837 Location first = locations->InAt(0);
3838 Location second = locations->InAt(1);
3839 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003840 case DataType::Type::kInt32: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003841 __ Sub(OutputRegister(sub), InputRegisterAt(sub, 0), InputOperandAt(sub, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01003842 break;
3843 }
3844
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003845 case DataType::Type::kInt64: {
Anton Kirilovdda43962016-11-21 19:55:20 +00003846 if (second.IsConstant()) {
3847 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
3848 GenerateAddLongConst(out, first, -value);
3849 } else {
3850 DCHECK(second.IsRegisterPair());
3851 __ Subs(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
3852 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
3853 }
Scott Wakelingfe885462016-09-22 10:24:38 +01003854 break;
3855 }
3856
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003857 case DataType::Type::kFloat32:
3858 case DataType::Type::kFloat64:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003859 __ Vsub(OutputVRegister(sub), InputVRegisterAt(sub, 0), InputVRegisterAt(sub, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01003860 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01003861
3862 default:
3863 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
3864 }
3865}
3866
3867void LocationsBuilderARMVIXL::VisitMul(HMul* mul) {
3868 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003869 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Scott Wakelingfe885462016-09-22 10:24:38 +01003870 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003871 case DataType::Type::kInt32:
3872 case DataType::Type::kInt64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003873 locations->SetInAt(0, Location::RequiresRegister());
3874 locations->SetInAt(1, Location::RequiresRegister());
3875 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3876 break;
3877 }
3878
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003879 case DataType::Type::kFloat32:
3880 case DataType::Type::kFloat64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003881 locations->SetInAt(0, Location::RequiresFpuRegister());
3882 locations->SetInAt(1, Location::RequiresFpuRegister());
3883 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3884 break;
3885 }
3886
3887 default:
3888 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
3889 }
3890}
3891
3892void InstructionCodeGeneratorARMVIXL::VisitMul(HMul* mul) {
3893 LocationSummary* locations = mul->GetLocations();
3894 Location out = locations->Out();
3895 Location first = locations->InAt(0);
3896 Location second = locations->InAt(1);
3897 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003898 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01003899 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
3900 break;
3901 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003902 case DataType::Type::kInt64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003903 vixl32::Register out_hi = HighRegisterFrom(out);
3904 vixl32::Register out_lo = LowRegisterFrom(out);
3905 vixl32::Register in1_hi = HighRegisterFrom(first);
3906 vixl32::Register in1_lo = LowRegisterFrom(first);
3907 vixl32::Register in2_hi = HighRegisterFrom(second);
3908 vixl32::Register in2_lo = LowRegisterFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01003909
3910 // Extra checks to protect against problems caused by the existence of R1_R2.
3911 // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
3912 // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
Anton Kirilov644032c2016-12-06 17:51:43 +00003913 DCHECK(!out_hi.Is(in1_lo));
3914 DCHECK(!out_hi.Is(in2_lo));
Scott Wakelingfe885462016-09-22 10:24:38 +01003915
3916 // input: in1 - 64 bits, in2 - 64 bits
3917 // output: out
3918 // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
3919 // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
3920 // parts: out.lo = (in1.lo * in2.lo)[31:0]
3921
3922 UseScratchRegisterScope temps(GetVIXLAssembler());
3923 vixl32::Register temp = temps.Acquire();
3924 // temp <- in1.lo * in2.hi
3925 __ Mul(temp, in1_lo, in2_hi);
3926 // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3927 __ Mla(out_hi, in1_hi, in2_lo, temp);
3928 // out.lo <- (in1.lo * in2.lo)[31:0];
3929 __ Umull(out_lo, temp, in1_lo, in2_lo);
3930 // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003931 __ Add(out_hi, out_hi, temp);
Scott Wakelingfe885462016-09-22 10:24:38 +01003932 break;
3933 }
3934
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003935 case DataType::Type::kFloat32:
3936 case DataType::Type::kFloat64:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003937 __ Vmul(OutputVRegister(mul), InputVRegisterAt(mul, 0), InputVRegisterAt(mul, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01003938 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01003939
3940 default:
3941 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
3942 }
3943}
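
// Illustrative sketch, not part of the original file (helper name is
// hypothetical): the kInt64 multiply above drops the in1.hi * in2.hi term
// because it only contributes to bits >= 64, and assembles the rest as
//   out = ((in1.lo * in2.hi + in1.hi * in2.lo) << 32) + in1.lo * in2.lo.
static void Mul64Sketch(uint32_t a_lo, uint32_t a_hi,
                        uint32_t b_lo, uint32_t b_hi,
                        uint32_t* out_lo, uint32_t* out_hi) {
  uint32_t cross = a_lo * b_hi + a_hi * b_lo;          // MUL + MLA, modulo 2^32
  uint64_t low = static_cast<uint64_t>(a_lo) * b_lo;   // UMULL
  *out_lo = static_cast<uint32_t>(low);
  *out_hi = cross + static_cast<uint32_t>(low >> 32);  // final ADD
}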
3944
Scott Wakelingfe885462016-09-22 10:24:38 +01003945void InstructionCodeGeneratorARMVIXL::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3946 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003947 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
Scott Wakelingfe885462016-09-22 10:24:38 +01003948
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003949 Location second = instruction->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01003950 DCHECK(second.IsConstant());
3951
3952 vixl32::Register out = OutputRegister(instruction);
3953 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Anton Kirilov644032c2016-12-06 17:51:43 +00003954 int32_t imm = Int32ConstantFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01003955 DCHECK(imm == 1 || imm == -1);
3956
3957 if (instruction->IsRem()) {
3958 __ Mov(out, 0);
3959 } else {
3960 if (imm == 1) {
3961 __ Mov(out, dividend);
3962 } else {
3963 __ Rsb(out, dividend, 0);
3964 }
3965 }
3966}
3967
3968void InstructionCodeGeneratorARMVIXL::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
3969 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003970 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
Scott Wakelingfe885462016-09-22 10:24:38 +01003971
3972 LocationSummary* locations = instruction->GetLocations();
3973 Location second = locations->InAt(1);
3974 DCHECK(second.IsConstant());
3975
3976 vixl32::Register out = OutputRegister(instruction);
3977 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003978 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
Anton Kirilov644032c2016-12-06 17:51:43 +00003979 int32_t imm = Int32ConstantFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01003980 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
3981 int ctz_imm = CTZ(abs_imm);
3982
3983 if (ctz_imm == 1) {
3984 __ Lsr(temp, dividend, 32 - ctz_imm);
3985 } else {
3986 __ Asr(temp, dividend, 31);
3987 __ Lsr(temp, temp, 32 - ctz_imm);
3988 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003989 __ Add(out, temp, dividend);
Scott Wakelingfe885462016-09-22 10:24:38 +01003990
3991 if (instruction->IsDiv()) {
3992 __ Asr(out, out, ctz_imm);
3993 if (imm < 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003994 __ Rsb(out, out, 0);
Scott Wakelingfe885462016-09-22 10:24:38 +01003995 }
3996 } else {
3997 __ Ubfx(out, out, 0, ctz_imm);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003998 __ Sub(out, out, temp);
Scott Wakelingfe885462016-09-22 10:24:38 +01003999 }
4000}
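
// Illustrative sketch, not part of the original file (helper name is
// hypothetical): for |imm| == 2^k the sequence above rounds the quotient
// toward zero by adding a bias of 2^k - 1 only for negative dividends; the
// bias is derived from the sign bit with an ASR/LSR pair.
static int32_t DivByPowerOfTwoSketch(int32_t dividend, int k, bool negative_divisor) {
  uint32_t sign = static_cast<uint32_t>(dividend >> 31);    // ASR: 0 or 0xFFFFFFFF
  uint32_t bias = sign >> (32 - k);                         // LSR: 0 or 2^k - 1
  int32_t rounded = dividend + static_cast<int32_t>(bias);  // ADD
  int32_t quotient = rounded >> k;                          // ASR
  return negative_divisor ? -quotient : quotient;           // RSB when imm < 0
}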
4001
4002void InstructionCodeGeneratorARMVIXL::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
4003 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004004 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
Scott Wakelingfe885462016-09-22 10:24:38 +01004005
4006 LocationSummary* locations = instruction->GetLocations();
4007 Location second = locations->InAt(1);
4008 DCHECK(second.IsConstant());
4009
4010 vixl32::Register out = OutputRegister(instruction);
4011 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004012 vixl32::Register temp1 = RegisterFrom(locations->GetTemp(0));
4013 vixl32::Register temp2 = RegisterFrom(locations->GetTemp(1));
Scott Wakelingb77051e2016-11-21 19:46:00 +00004014 int32_t imm = Int32ConstantFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01004015
4016 int64_t magic;
4017 int shift;
Andreas Gampe3db70682018-12-26 15:12:03 -08004018 CalculateMagicAndShiftForDivRem(imm, /* is_long= */ false, &magic, &shift);
Scott Wakelingfe885462016-09-22 10:24:38 +01004019
Anton Kirilovdda43962016-11-21 19:55:20 +00004020 // TODO(VIXL): Change the static cast to Operand::From() after VIXL is fixed.
4021 __ Mov(temp1, static_cast<int32_t>(magic));
Scott Wakelingfe885462016-09-22 10:24:38 +01004022 __ Smull(temp2, temp1, dividend, temp1);
4023
4024 if (imm > 0 && magic < 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004025 __ Add(temp1, temp1, dividend);
Scott Wakelingfe885462016-09-22 10:24:38 +01004026 } else if (imm < 0 && magic > 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004027 __ Sub(temp1, temp1, dividend);
Scott Wakelingfe885462016-09-22 10:24:38 +01004028 }
4029
4030 if (shift != 0) {
4031 __ Asr(temp1, temp1, shift);
4032 }
4033
4034 if (instruction->IsDiv()) {
4035 __ Sub(out, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
4036 } else {
4037 __ Sub(temp1, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
4038 // TODO: Strength reduction for mls.
4039 __ Mov(temp2, imm);
4040 __ Mls(out, temp1, temp2, dividend);
4041 }
4042}
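
// Illustrative sketch, not part of the original file (helper name is
// hypothetical): the arbitrary-constant path above multiplies by a
// precomputed "magic" reciprocal (SMULL keeps the high 32 bits of the
// product), applies the sign fix-ups, shifts, and finally adds one for
// negative results; the remainder variant then recovers
// dividend - quotient * imm with MLS.
static int32_t MagicDivSketch(int32_t dividend, int32_t imm, int64_t magic, int shift) {
  int64_t product = static_cast<int64_t>(dividend) * static_cast<int32_t>(magic);
  int32_t high = static_cast<int32_t>(product >> 32);  // SMULL: high half
  if (imm > 0 && magic < 0) high += dividend;          // ADD fix-up
  if (imm < 0 && magic > 0) high -= dividend;          // SUB fix-up
  if (shift != 0) high >>= shift;                      // ASR
  return high - (high >> 31);                          // subtracting -1 adds 1 when negative
}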
4043
4044void InstructionCodeGeneratorARMVIXL::GenerateDivRemConstantIntegral(
4045 HBinaryOperation* instruction) {
4046 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004047 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
Scott Wakelingfe885462016-09-22 10:24:38 +01004048
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004049 Location second = instruction->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01004050 DCHECK(second.IsConstant());
4051
Anton Kirilov644032c2016-12-06 17:51:43 +00004052 int32_t imm = Int32ConstantFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01004053 if (imm == 0) {
4054 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
4055 } else if (imm == 1 || imm == -1) {
4056 DivRemOneOrMinusOne(instruction);
4057 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
4058 DivRemByPowerOfTwo(instruction);
4059 } else {
4060 DCHECK(imm <= -2 || imm >= 2);
4061 GenerateDivRemWithAnyConstant(instruction);
4062 }
4063}
4064
4065void LocationsBuilderARMVIXL::VisitDiv(HDiv* div) {
4066 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004067 if (div->GetResultType() == DataType::Type::kInt64) {
Scott Wakelingfe885462016-09-22 10:24:38 +01004068 // pLdiv runtime call.
4069 call_kind = LocationSummary::kCallOnMainOnly;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004070 } else if (div->GetResultType() == DataType::Type::kInt32 && div->InputAt(1)->IsConstant()) {
Scott Wakelingfe885462016-09-22 10:24:38 +01004071 // sdiv will be replaced by other instruction sequence.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004072 } else if (div->GetResultType() == DataType::Type::kInt32 &&
Scott Wakelingfe885462016-09-22 10:24:38 +01004073 !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4074 // pIdivmod runtime call.
4075 call_kind = LocationSummary::kCallOnMainOnly;
4076 }
4077
Vladimir Markoca6fff82017-10-03 14:49:14 +01004078 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(div, call_kind);
Scott Wakelingfe885462016-09-22 10:24:38 +01004079
4080 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004081 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004082 if (div->InputAt(1)->IsConstant()) {
4083 locations->SetInAt(0, Location::RequiresRegister());
4084 locations->SetInAt(1, Location::ConstantLocation(div->InputAt(1)->AsConstant()));
4085 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Anton Kirilov644032c2016-12-06 17:51:43 +00004086 int32_t value = Int32ConstantFrom(div->InputAt(1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004087 if (value == 1 || value == 0 || value == -1) {
4088 // No temp register required.
4089 } else {
4090 locations->AddTemp(Location::RequiresRegister());
4091 if (!IsPowerOfTwo(AbsOrMin(value))) {
4092 locations->AddTemp(Location::RequiresRegister());
4093 }
4094 }
4095 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4096 locations->SetInAt(0, Location::RequiresRegister());
4097 locations->SetInAt(1, Location::RequiresRegister());
4098 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4099 } else {
Artem Serov551b28f2016-10-18 19:11:30 +01004100 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4101 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4102 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004103 // Note: divmod will compute both the quotient and the remainder as the pair R0 and R1, but
Artem Serov551b28f2016-10-18 19:11:30 +01004104 // we only need the former.
4105 locations->SetOut(LocationFrom(r0));
Scott Wakelingfe885462016-09-22 10:24:38 +01004106 }
4107 break;
4108 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004109 case DataType::Type::kInt64: {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004110 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4111 locations->SetInAt(0, LocationFrom(
4112 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
4113 locations->SetInAt(1, LocationFrom(
4114 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
4115 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004116 break;
4117 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004118 case DataType::Type::kFloat32:
4119 case DataType::Type::kFloat64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004120 locations->SetInAt(0, Location::RequiresFpuRegister());
4121 locations->SetInAt(1, Location::RequiresFpuRegister());
4122 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4123 break;
4124 }
4125
4126 default:
4127 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
4128 }
4129}
4130
4131void InstructionCodeGeneratorARMVIXL::VisitDiv(HDiv* div) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004132 Location lhs = div->GetLocations()->InAt(0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004133 Location rhs = div->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01004134
4135 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004136 case DataType::Type::kInt32: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004137 if (rhs.IsConstant()) {
Scott Wakelingfe885462016-09-22 10:24:38 +01004138 GenerateDivRemConstantIntegral(div);
4139 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4140 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
4141 } else {
Artem Serov551b28f2016-10-18 19:11:30 +01004142 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4143 DCHECK(calling_convention.GetRegisterAt(0).Is(RegisterFrom(lhs)));
4144 DCHECK(calling_convention.GetRegisterAt(1).Is(RegisterFrom(rhs)));
4145 DCHECK(r0.Is(OutputRegister(div)));
4146
4147 codegen_->InvokeRuntime(kQuickIdivmod, div, div->GetDexPc());
4148 CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
Scott Wakelingfe885462016-09-22 10:24:38 +01004149 }
4150 break;
4151 }
4152
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004153 case DataType::Type::kInt64: {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004154 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4155 DCHECK(calling_convention.GetRegisterAt(0).Is(LowRegisterFrom(lhs)));
4156 DCHECK(calling_convention.GetRegisterAt(1).Is(HighRegisterFrom(lhs)));
4157 DCHECK(calling_convention.GetRegisterAt(2).Is(LowRegisterFrom(rhs)));
4158 DCHECK(calling_convention.GetRegisterAt(3).Is(HighRegisterFrom(rhs)));
4159 DCHECK(LowRegisterFrom(div->GetLocations()->Out()).Is(r0));
4160 DCHECK(HighRegisterFrom(div->GetLocations()->Out()).Is(r1));
4161
4162 codegen_->InvokeRuntime(kQuickLdiv, div, div->GetDexPc());
4163 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Scott Wakelingfe885462016-09-22 10:24:38 +01004164 break;
4165 }
4166
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004167 case DataType::Type::kFloat32:
4168 case DataType::Type::kFloat64:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004169 __ Vdiv(OutputVRegister(div), InputVRegisterAt(div, 0), InputVRegisterAt(div, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004170 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01004171
4172 default:
4173 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
4174 }
4175}
4176
Artem Serov551b28f2016-10-18 19:11:30 +01004177void LocationsBuilderARMVIXL::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004178 DataType::Type type = rem->GetResultType();
Artem Serov551b28f2016-10-18 19:11:30 +01004179
4180 // Most remainders are implemented in the runtime.
4181 LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004182 if (rem->GetResultType() == DataType::Type::kInt32 && rem->InputAt(1)->IsConstant()) {
Artem Serov551b28f2016-10-18 19:11:30 +01004183 // sdiv will be replaced by other instruction sequence.
4184 call_kind = LocationSummary::kNoCall;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004185 } else if ((rem->GetResultType() == DataType::Type::kInt32)
Artem Serov551b28f2016-10-18 19:11:30 +01004186 && codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4187 // We have a hardware divide instruction for int; do it with three instructions.
4188 call_kind = LocationSummary::kNoCall;
4189 }
4190
Vladimir Markoca6fff82017-10-03 14:49:14 +01004191 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Artem Serov551b28f2016-10-18 19:11:30 +01004192
4193 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004194 case DataType::Type::kInt32: {
Artem Serov551b28f2016-10-18 19:11:30 +01004195 if (rem->InputAt(1)->IsConstant()) {
4196 locations->SetInAt(0, Location::RequiresRegister());
4197 locations->SetInAt(1, Location::ConstantLocation(rem->InputAt(1)->AsConstant()));
4198 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Anton Kirilov644032c2016-12-06 17:51:43 +00004199 int32_t value = Int32ConstantFrom(rem->InputAt(1));
Artem Serov551b28f2016-10-18 19:11:30 +01004200 if (value == 1 || value == 0 || value == -1) {
4201 // No temp register required.
4202 } else {
4203 locations->AddTemp(Location::RequiresRegister());
4204 if (!IsPowerOfTwo(AbsOrMin(value))) {
4205 locations->AddTemp(Location::RequiresRegister());
4206 }
4207 }
4208 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4209 locations->SetInAt(0, Location::RequiresRegister());
4210 locations->SetInAt(1, Location::RequiresRegister());
4211 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4212 locations->AddTemp(Location::RequiresRegister());
4213 } else {
4214 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4215 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4216 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004217 // Note: divmod will compute both the quotient and the remainder as the pair R0 and R1, but
Artem Serov551b28f2016-10-18 19:11:30 +01004218 // we only need the latter.
4219 locations->SetOut(LocationFrom(r1));
4220 }
4221 break;
4222 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004223 case DataType::Type::kInt64: {
Artem Serov551b28f2016-10-18 19:11:30 +01004224 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4225 locations->SetInAt(0, LocationFrom(
4226 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
4227 locations->SetInAt(1, LocationFrom(
4228 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
4229 // The runtime helper puts the output in R2,R3.
4230 locations->SetOut(LocationFrom(r2, r3));
4231 break;
4232 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004233 case DataType::Type::kFloat32: {
Artem Serov551b28f2016-10-18 19:11:30 +01004234 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4235 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4236 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4237 locations->SetOut(LocationFrom(s0));
4238 break;
4239 }
4240
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004241 case DataType::Type::kFloat64: {
Artem Serov551b28f2016-10-18 19:11:30 +01004242 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4243 locations->SetInAt(0, LocationFrom(
4244 calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
4245 locations->SetInAt(1, LocationFrom(
4246 calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
4247 locations->SetOut(LocationFrom(s0, s1));
4248 break;
4249 }
4250
4251 default:
4252 LOG(FATAL) << "Unexpected rem type " << type;
4253 }
4254}
4255
4256void InstructionCodeGeneratorARMVIXL::VisitRem(HRem* rem) {
4257 LocationSummary* locations = rem->GetLocations();
4258 Location second = locations->InAt(1);
4259
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004260 DataType::Type type = rem->GetResultType();
Artem Serov551b28f2016-10-18 19:11:30 +01004261 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004262 case DataType::Type::kInt32: {
Artem Serov551b28f2016-10-18 19:11:30 +01004263 vixl32::Register reg1 = InputRegisterAt(rem, 0);
4264 vixl32::Register out_reg = OutputRegister(rem);
4265 if (second.IsConstant()) {
4266 GenerateDivRemConstantIntegral(rem);
4267 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4268 vixl32::Register reg2 = RegisterFrom(second);
4269 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
4270
4271 // temp = reg1 / reg2 (integer division)
4272 // dest = reg1 - temp * reg2
4273 __ Sdiv(temp, reg1, reg2);
4274 __ Mls(out_reg, temp, reg2, reg1);
4275 } else {
4276 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4277 DCHECK(reg1.Is(calling_convention.GetRegisterAt(0)));
4278 DCHECK(RegisterFrom(second).Is(calling_convention.GetRegisterAt(1)));
4279 DCHECK(out_reg.Is(r1));
4280
4281 codegen_->InvokeRuntime(kQuickIdivmod, rem, rem->GetDexPc());
4282 CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
4283 }
4284 break;
4285 }
4286
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004287 case DataType::Type::kInt64: {
Artem Serov551b28f2016-10-18 19:11:30 +01004288 codegen_->InvokeRuntime(kQuickLmod, rem, rem->GetDexPc());
4289 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
4290 break;
4291 }
4292
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004293 case DataType::Type::kFloat32: {
Artem Serov551b28f2016-10-18 19:11:30 +01004294 codegen_->InvokeRuntime(kQuickFmodf, rem, rem->GetDexPc());
4295 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4296 break;
4297 }
4298
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004299 case DataType::Type::kFloat64: {
Artem Serov551b28f2016-10-18 19:11:30 +01004300 codegen_->InvokeRuntime(kQuickFmod, rem, rem->GetDexPc());
4301 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4302 break;
4303 }
4304
4305 default:
4306 LOG(FATAL) << "Unexpected rem type " << type;
4307 }
4308}
4309
Aart Bik1f8d51b2018-02-15 10:42:37 -08004310static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
4311 LocationSummary* locations = new (allocator) LocationSummary(minmax);
4312 switch (minmax->GetResultType()) {
4313 case DataType::Type::kInt32:
4314 locations->SetInAt(0, Location::RequiresRegister());
4315 locations->SetInAt(1, Location::RequiresRegister());
4316 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4317 break;
4318 case DataType::Type::kInt64:
4319 locations->SetInAt(0, Location::RequiresRegister());
4320 locations->SetInAt(1, Location::RequiresRegister());
4321 locations->SetOut(Location::SameAsFirstInput());
4322 break;
4323 case DataType::Type::kFloat32:
4324 locations->SetInAt(0, Location::RequiresFpuRegister());
4325 locations->SetInAt(1, Location::RequiresFpuRegister());
4326 locations->SetOut(Location::SameAsFirstInput());
4327 locations->AddTemp(Location::RequiresRegister());
4328 break;
4329 case DataType::Type::kFloat64:
4330 locations->SetInAt(0, Location::RequiresFpuRegister());
4331 locations->SetInAt(1, Location::RequiresFpuRegister());
4332 locations->SetOut(Location::SameAsFirstInput());
4333 break;
4334 default:
4335 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
4336 }
4337}
4338
Aart Bik351df3e2018-03-07 11:54:57 -08004339void InstructionCodeGeneratorARMVIXL::GenerateMinMaxInt(LocationSummary* locations, bool is_min) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08004340 Location op1_loc = locations->InAt(0);
4341 Location op2_loc = locations->InAt(1);
4342 Location out_loc = locations->Out();
4343
4344 vixl32::Register op1 = RegisterFrom(op1_loc);
4345 vixl32::Register op2 = RegisterFrom(op2_loc);
4346 vixl32::Register out = RegisterFrom(out_loc);
4347
4348 __ Cmp(op1, op2);
4349
4350 {
4351 ExactAssemblyScope aas(GetVIXLAssembler(),
4352 3 * kMaxInstructionSizeInBytes,
4353 CodeBufferCheckScope::kMaximumSize);
4354
4355 __ ite(is_min ? lt : gt);
4356 __ mov(is_min ? lt : gt, out, op1);
4357 __ mov(is_min ? ge : le, out, op2);
4358 }
4359}
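
// Illustrative sketch, not part of the original file (helper name is
// hypothetical): the CMP + ITE block above is a branchless conditional
// select; exactly one of the two predicated MOVs takes effect, depending on
// the signed comparison.
static int32_t MinMaxIntSketch(int32_t op1, int32_t op2, bool is_min) {
  bool take_op1 = is_min ? (op1 < op2) : (op1 > op2);  // lt / gt condition
  return take_op1 ? op1 : op2;                         // mov<cond> vs. mov<inverse>
}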
4360
4361void InstructionCodeGeneratorARMVIXL::GenerateMinMaxLong(LocationSummary* locations, bool is_min) {
4362 Location op1_loc = locations->InAt(0);
4363 Location op2_loc = locations->InAt(1);
4364 Location out_loc = locations->Out();
4365
4366 // Optimization: don't generate any code if inputs are the same.
4367 if (op1_loc.Equals(op2_loc)) {
4368 DCHECK(out_loc.Equals(op1_loc)); // out_loc is set as SameAsFirstInput() in location builder.
4369 return;
4370 }
4371
4372 vixl32::Register op1_lo = LowRegisterFrom(op1_loc);
4373 vixl32::Register op1_hi = HighRegisterFrom(op1_loc);
4374 vixl32::Register op2_lo = LowRegisterFrom(op2_loc);
4375 vixl32::Register op2_hi = HighRegisterFrom(op2_loc);
4376 vixl32::Register out_lo = LowRegisterFrom(out_loc);
4377 vixl32::Register out_hi = HighRegisterFrom(out_loc);
4378 UseScratchRegisterScope temps(GetVIXLAssembler());
4379 const vixl32::Register temp = temps.Acquire();
4380
4381 DCHECK(op1_lo.Is(out_lo));
4382 DCHECK(op1_hi.Is(out_hi));
4383
4384 // Compare op1 >= op2, or op1 < op2.
4385 __ Cmp(out_lo, op2_lo);
4386 __ Sbcs(temp, out_hi, op2_hi);
4387
4388 // Now GE/LT condition code is correct for the long comparison.
4389 {
4390 vixl32::ConditionType cond = is_min ? ge : lt;
4391 ExactAssemblyScope it_scope(GetVIXLAssembler(),
4392 3 * kMaxInstructionSizeInBytes,
4393 CodeBufferCheckScope::kMaximumSize);
4394 __ itt(cond);
4395 __ mov(cond, out_lo, op2_lo);
4396 __ mov(cond, out_hi, op2_hi);
4397 }
4398}
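
// Illustrative sketch, not part of the original file (helper name is
// hypothetical): the long min/max above orders two 64-bit values with CMP on
// the low words followed by SBCS on the high words; the GE/LT flags after the
// subtract-with-carry reflect the full 64-bit signed comparison.
static bool SignedLessThan64Sketch(uint32_t a_lo, int32_t a_hi,
                                   uint32_t b_lo, int32_t b_hi) {
  uint32_t borrow = (a_lo < b_lo) ? 1u : 0u;                       // CMP: borrow out of the low words
  int64_t high_diff = static_cast<int64_t>(a_hi) - b_hi - borrow;  // SBCS
  return high_diff < 0;                                            // the LT condition
}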
4399
Aart Bik351df3e2018-03-07 11:54:57 -08004400void InstructionCodeGeneratorARMVIXL::GenerateMinMaxFloat(HInstruction* minmax, bool is_min) {
4401 LocationSummary* locations = minmax->GetLocations();
Aart Bik1f8d51b2018-02-15 10:42:37 -08004402 Location op1_loc = locations->InAt(0);
4403 Location op2_loc = locations->InAt(1);
4404 Location out_loc = locations->Out();
4405
4406 // Optimization: don't generate any code if inputs are the same.
4407 if (op1_loc.Equals(op2_loc)) {
4408 DCHECK(out_loc.Equals(op1_loc)); // out_loc is set as SameAsFirstInput() in location builder.
4409 return;
4410 }
4411
4412 vixl32::SRegister op1 = SRegisterFrom(op1_loc);
4413 vixl32::SRegister op2 = SRegisterFrom(op2_loc);
4414 vixl32::SRegister out = SRegisterFrom(out_loc);
4415
4416 UseScratchRegisterScope temps(GetVIXLAssembler());
4417 const vixl32::Register temp1 = temps.Acquire();
4418 vixl32::Register temp2 = RegisterFrom(locations->GetTemp(0));
4419 vixl32::Label nan, done;
Aart Bik351df3e2018-03-07 11:54:57 -08004420 vixl32::Label* final_label = codegen_->GetFinalLabel(minmax, &done);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004421
4422 DCHECK(op1.Is(out));
4423
4424 __ Vcmp(op1, op2);
4425 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
Andreas Gampe3db70682018-12-26 15:12:03 -08004426 __ B(vs, &nan, /* is_far_target= */ false); // if un-ordered, go to NaN handling.
Aart Bik1f8d51b2018-02-15 10:42:37 -08004427
4428 // op1 <> op2
4429 vixl32::ConditionType cond = is_min ? gt : lt;
4430 {
4431 ExactAssemblyScope it_scope(GetVIXLAssembler(),
4432 2 * kMaxInstructionSizeInBytes,
4433 CodeBufferCheckScope::kMaximumSize);
4434 __ it(cond);
4435 __ vmov(cond, F32, out, op2);
4436 }
4437 // For <> (not equal), the min/max calculation is already done.
Andreas Gampe3db70682018-12-26 15:12:03 -08004438 __ B(ne, final_label, /* is_far_target= */ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004439
4440 // handle op1 == op2, max(+0.0,-0.0), min(+0.0,-0.0).
4441 __ Vmov(temp1, op1);
4442 __ Vmov(temp2, op2);
4443 if (is_min) {
4444 __ Orr(temp1, temp1, temp2);
4445 } else {
4446 __ And(temp1, temp1, temp2);
4447 }
4448 __ Vmov(out, temp1);
4449 __ B(final_label);
4450
4451 // handle NaN input.
4452 __ Bind(&nan);
4453 __ Movt(temp1, High16Bits(kNanFloat)); // 0x7FC0xxxx is a NaN.
4454 __ Vmov(out, temp1);
4455
4456 if (done.IsReferenced()) {
4457 __ Bind(&done);
4458 }
4459}
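
// Illustrative sketch, not part of the original file (helper name is
// hypothetical; bit_cast is assumed to be the same helper used elsewhere in
// this file): when the VFP compare reports equality, +0.0f and -0.0f are
// indistinguishable, so the code above combines the raw bit patterns: ORR
// keeps the sign bit for min (-0.0f wins), AND clears it for max (+0.0f wins).
static float MinMaxEqualFloatSketch(float a, float b, bool is_min) {
  uint32_t ra = bit_cast<uint32_t, float>(a);
  uint32_t rb = bit_cast<uint32_t, float>(b);
  uint32_t rc = is_min ? (ra | rb) : (ra & rb);
  return bit_cast<float, uint32_t>(rc);
}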
4460
Aart Bik351df3e2018-03-07 11:54:57 -08004461void InstructionCodeGeneratorARMVIXL::GenerateMinMaxDouble(HInstruction* minmax, bool is_min) {
4462 LocationSummary* locations = minmax->GetLocations();
Aart Bik1f8d51b2018-02-15 10:42:37 -08004463 Location op1_loc = locations->InAt(0);
4464 Location op2_loc = locations->InAt(1);
4465 Location out_loc = locations->Out();
4466
4467 // Optimization: don't generate any code if inputs are the same.
4468 if (op1_loc.Equals(op2_loc)) {
4469 DCHECK(out_loc.Equals(op1_loc)); // out_loc is set as SameAsFirstInput() in location builder.
4470 return;
4471 }
4472
4473 vixl32::DRegister op1 = DRegisterFrom(op1_loc);
4474 vixl32::DRegister op2 = DRegisterFrom(op2_loc);
4475 vixl32::DRegister out = DRegisterFrom(out_loc);
4476 vixl32::Label handle_nan_eq, done;
Aart Bik351df3e2018-03-07 11:54:57 -08004477 vixl32::Label* final_label = codegen_->GetFinalLabel(minmax, &done);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004478
4479 DCHECK(op1.Is(out));
4480
4481 __ Vcmp(op1, op2);
4482 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
Andreas Gampe3db70682018-12-26 15:12:03 -08004483 __ B(vs, &handle_nan_eq, /* is_far_target= */ false); // if un-ordered, go to NaN handling.
Aart Bik1f8d51b2018-02-15 10:42:37 -08004484
4485 // op1 <> op2
4486 vixl32::ConditionType cond = is_min ? gt : lt;
4487 {
4488 ExactAssemblyScope it_scope(GetVIXLAssembler(),
4489 2 * kMaxInstructionSizeInBytes,
4490 CodeBufferCheckScope::kMaximumSize);
4491 __ it(cond);
4492 __ vmov(cond, F64, out, op2);
4493 }
4494 // For <> (not equal), the min/max calculation is already done.
Andreas Gampe3db70682018-12-26 15:12:03 -08004495 __ B(ne, final_label, /* is_far_target= */ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004496
4497 // handle op1 == op2, max(+0.0,-0.0).
4498 if (!is_min) {
4499 __ Vand(F64, out, op1, op2);
4500 __ B(final_label);
4501 }
4502
4503 // handle op1 == op2, min(+0.0,-0.0), NaN input.
4504 __ Bind(&handle_nan_eq);
4505 __ Vorr(F64, out, op1, op2); // assemble op1/-0.0/NaN.
4506
4507 if (done.IsReferenced()) {
4508 __ Bind(&done);
4509 }
4510}
4511
Aart Bik351df3e2018-03-07 11:54:57 -08004512void InstructionCodeGeneratorARMVIXL::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4513 DataType::Type type = minmax->GetResultType();
4514 switch (type) {
4515 case DataType::Type::kInt32:
4516 GenerateMinMaxInt(minmax->GetLocations(), is_min);
4517 break;
4518 case DataType::Type::kInt64:
4519 GenerateMinMaxLong(minmax->GetLocations(), is_min);
4520 break;
4521 case DataType::Type::kFloat32:
4522 GenerateMinMaxFloat(minmax, is_min);
4523 break;
4524 case DataType::Type::kFloat64:
4525 GenerateMinMaxDouble(minmax, is_min);
4526 break;
4527 default:
4528 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4529 }
4530}
4531
Aart Bik1f8d51b2018-02-15 10:42:37 -08004532void LocationsBuilderARMVIXL::VisitMin(HMin* min) {
4533 CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
4534}
4535
4536void InstructionCodeGeneratorARMVIXL::VisitMin(HMin* min) {
Aart Bik351df3e2018-03-07 11:54:57 -08004537 GenerateMinMax(min, /*is_min*/ true);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004538}
4539
4540void LocationsBuilderARMVIXL::VisitMax(HMax* max) {
4541 CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
4542}
4543
4544void InstructionCodeGeneratorARMVIXL::VisitMax(HMax* max) {
Aart Bik351df3e2018-03-07 11:54:57 -08004545 GenerateMinMax(max, /*is_min*/ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004546}
4547
Aart Bik3dad3412018-02-28 12:01:46 -08004548void LocationsBuilderARMVIXL::VisitAbs(HAbs* abs) {
4549 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4550 switch (abs->GetResultType()) {
4551 case DataType::Type::kInt32:
4552 case DataType::Type::kInt64:
4553 locations->SetInAt(0, Location::RequiresRegister());
4554 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4555 locations->AddTemp(Location::RequiresRegister());
4556 break;
4557 case DataType::Type::kFloat32:
4558 case DataType::Type::kFloat64:
4559 locations->SetInAt(0, Location::RequiresFpuRegister());
4560 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4561 break;
4562 default:
4563 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
4564 }
4565}
4566
4567void InstructionCodeGeneratorARMVIXL::VisitAbs(HAbs* abs) {
4568 LocationSummary* locations = abs->GetLocations();
4569 switch (abs->GetResultType()) {
4570 case DataType::Type::kInt32: {
4571 vixl32::Register in_reg = RegisterFrom(locations->InAt(0));
4572 vixl32::Register out_reg = RegisterFrom(locations->Out());
4573 vixl32::Register mask = RegisterFrom(locations->GetTemp(0));
4574 __ Asr(mask, in_reg, 31);
4575 __ Add(out_reg, in_reg, mask);
4576 __ Eor(out_reg, out_reg, mask);
4577 break;
4578 }
4579 case DataType::Type::kInt64: {
4580 Location in = locations->InAt(0);
4581 vixl32::Register in_reg_lo = LowRegisterFrom(in);
4582 vixl32::Register in_reg_hi = HighRegisterFrom(in);
4583 Location output = locations->Out();
4584 vixl32::Register out_reg_lo = LowRegisterFrom(output);
4585 vixl32::Register out_reg_hi = HighRegisterFrom(output);
4586 DCHECK(!out_reg_lo.Is(in_reg_hi)) << "Diagonal overlap unexpected.";
4587 vixl32::Register mask = RegisterFrom(locations->GetTemp(0));
4588 __ Asr(mask, in_reg_hi, 31);
4589 __ Adds(out_reg_lo, in_reg_lo, mask);
4590 __ Adc(out_reg_hi, in_reg_hi, mask);
4591 __ Eor(out_reg_lo, out_reg_lo, mask);
4592 __ Eor(out_reg_hi, out_reg_hi, mask);
4593 break;
4594 }
4595 case DataType::Type::kFloat32:
4596 case DataType::Type::kFloat64:
4597 __ Vabs(OutputVRegister(abs), InputVRegisterAt(abs, 0));
4598 break;
4599 default:
4600 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
4601 }
4602}
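
// Illustrative sketch, not part of the original file (helper name is
// hypothetical): the integer abs above is the classic branchless form; mask
// is 0 for non-negative inputs and all ones for negative ones, so
// (x + mask) ^ mask leaves the value alone or negates it. Like the generated
// code, it maps INT32_MIN to itself.
static int32_t Abs32Sketch(int32_t x) {
  int32_t mask = x >> 31;    // ASR: 0 or -1
  return (x + mask) ^ mask;  // ADD then EOR
}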
Artem Serov551b28f2016-10-18 19:11:30 +01004603
Scott Wakelingfe885462016-09-22 10:24:38 +01004604void LocationsBuilderARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Artem Serov657022c2016-11-23 14:19:38 +00004605 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Scott Wakelingfe885462016-09-22 10:24:38 +01004606 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Scott Wakelingfe885462016-09-22 10:24:38 +01004607}
4608
4609void InstructionCodeGeneratorARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
4610 DivZeroCheckSlowPathARMVIXL* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01004611 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARMVIXL(instruction);
Scott Wakelingfe885462016-09-22 10:24:38 +01004612 codegen_->AddSlowPath(slow_path);
4613
4614 LocationSummary* locations = instruction->GetLocations();
4615 Location value = locations->InAt(0);
4616
4617 switch (instruction->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004618 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004619 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004620 case DataType::Type::kInt8:
4621 case DataType::Type::kUint16:
4622 case DataType::Type::kInt16:
4623 case DataType::Type::kInt32: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004624 if (value.IsRegister()) {
xueliang.zhongf51bc622016-11-04 09:23:32 +00004625 __ CompareAndBranchIfZero(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
Scott Wakelingfe885462016-09-22 10:24:38 +01004626 } else {
4627 DCHECK(value.IsConstant()) << value;
Anton Kirilov644032c2016-12-06 17:51:43 +00004628 if (Int32ConstantFrom(value) == 0) {
Scott Wakelingfe885462016-09-22 10:24:38 +01004629 __ B(slow_path->GetEntryLabel());
4630 }
4631 }
4632 break;
4633 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004634 case DataType::Type::kInt64: {
Scott Wakelingfe885462016-09-22 10:24:38 +01004635 if (value.IsRegisterPair()) {
4636 UseScratchRegisterScope temps(GetVIXLAssembler());
4637 vixl32::Register temp = temps.Acquire();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004638 __ Orrs(temp, LowRegisterFrom(value), HighRegisterFrom(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01004639 __ B(eq, slow_path->GetEntryLabel());
4640 } else {
4641 DCHECK(value.IsConstant()) << value;
Anton Kirilov644032c2016-12-06 17:51:43 +00004642 if (Int64ConstantFrom(value) == 0) {
Scott Wakelingfe885462016-09-22 10:24:38 +01004643 __ B(slow_path->GetEntryLabel());
4644 }
4645 }
4646 break;
4647 }
4648 default:
4649 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
4650 }
4651}
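
// Illustrative sketch, not part of the original file (helper name is
// hypothetical): the kInt64 zero check above ORs the two halves and branches
// on the flags, since a 64-bit value is zero exactly when (low | high) == 0.
static bool IsZero64Sketch(uint32_t lo, uint32_t hi) {
  return (lo | hi) == 0u;  // ORRS followed by a branch on EQ
}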
4652
Artem Serov02109dd2016-09-23 17:17:54 +01004653void InstructionCodeGeneratorARMVIXL::HandleIntegerRotate(HRor* ror) {
4654 LocationSummary* locations = ror->GetLocations();
4655 vixl32::Register in = InputRegisterAt(ror, 0);
4656 Location rhs = locations->InAt(1);
4657 vixl32::Register out = OutputRegister(ror);
4658
4659 if (rhs.IsConstant()) {
4660 // Arm32 and Thumb2 assemblers require a rotation on the interval [1,31],
4661 // so map all rotations to a positive equivalent in that range.
4662 // (e.g. left *or* right by -2 bits == 30 bits in the same direction.)
4663 uint32_t rot = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()) & 0x1F;
4664 if (rot) {
4665 // Rotate, mapping left rotations to right equivalents if necessary.
4666 // (e.g. left by 2 bits == right by 30.)
4667 __ Ror(out, in, rot);
4668 } else if (!out.Is(in)) {
4669 __ Mov(out, in);
4670 }
4671 } else {
4672 __ Ror(out, in, RegisterFrom(rhs));
4673 }
4674}
4675
4676// Gain some speed by mapping all Long rotates onto equivalent pairs of Integer
4677// rotates by swapping input regs (effectively rotating by the first 32-bits of
4678// a larger rotation) or flipping direction (thus treating larger right/left
4679// rotations as sub-word sized rotations in the other direction) as appropriate.
4680void InstructionCodeGeneratorARMVIXL::HandleLongRotate(HRor* ror) {
4681 LocationSummary* locations = ror->GetLocations();
4682 vixl32::Register in_reg_lo = LowRegisterFrom(locations->InAt(0));
4683 vixl32::Register in_reg_hi = HighRegisterFrom(locations->InAt(0));
4684 Location rhs = locations->InAt(1);
4685 vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
4686 vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());
4687
4688 if (rhs.IsConstant()) {
4689 uint64_t rot = CodeGenerator::GetInt64ValueOf(rhs.GetConstant());
4690 // Map all rotations to positive equivalents on the interval [0,63].
4691 rot &= kMaxLongShiftDistance;
4692 // For rotates of more than a word in size, 'pre-rotate' by 32 bits to reduce the
4693 // rotate logic below to a simple pair of ORR instructions.
4694 // (e.g. 34 bits == in_reg swap + 2 bits right.)
4695 if (rot >= kArmBitsPerWord) {
4696 rot -= kArmBitsPerWord;
4697 std::swap(in_reg_hi, in_reg_lo);
4698 }
4699 // Rotate, or mov to out for zero or word size rotations.
4700 if (rot != 0u) {
Scott Wakelingb77051e2016-11-21 19:46:00 +00004701 __ Lsr(out_reg_hi, in_reg_hi, Operand::From(rot));
Artem Serov02109dd2016-09-23 17:17:54 +01004702 __ Orr(out_reg_hi, out_reg_hi, Operand(in_reg_lo, ShiftType::LSL, kArmBitsPerWord - rot));
Scott Wakelingb77051e2016-11-21 19:46:00 +00004703 __ Lsr(out_reg_lo, in_reg_lo, Operand::From(rot));
Artem Serov02109dd2016-09-23 17:17:54 +01004704 __ Orr(out_reg_lo, out_reg_lo, Operand(in_reg_hi, ShiftType::LSL, kArmBitsPerWord - rot));
4705 } else {
4706 __ Mov(out_reg_lo, in_reg_lo);
4707 __ Mov(out_reg_hi, in_reg_hi);
4708 }
4709 } else {
4710 vixl32::Register shift_right = RegisterFrom(locations->GetTemp(0));
4711 vixl32::Register shift_left = RegisterFrom(locations->GetTemp(1));
4712 vixl32::Label end;
4713 vixl32::Label shift_by_32_plus_shift_right;
Anton Kirilov6f644202017-02-27 18:29:45 +00004714 vixl32::Label* final_label = codegen_->GetFinalLabel(ror, &end);
Artem Serov02109dd2016-09-23 17:17:54 +01004715
4716 __ And(shift_right, RegisterFrom(rhs), 0x1F);
4717 __ Lsrs(shift_left, RegisterFrom(rhs), 6);
Scott Wakelingbffdc702016-12-07 17:46:03 +00004718 __ Rsb(LeaveFlags, shift_left, shift_right, Operand::From(kArmBitsPerWord));
Andreas Gampe3db70682018-12-26 15:12:03 -08004719 __ B(cc, &shift_by_32_plus_shift_right, /* is_far_target= */ false);
Artem Serov02109dd2016-09-23 17:17:54 +01004720
4721 // out_reg_hi = (reg_hi << shift_left) | (reg_lo >> shift_right).
4722 // out_reg_lo = (reg_lo << shift_left) | (reg_hi >> shift_right).
4723 __ Lsl(out_reg_hi, in_reg_hi, shift_left);
4724 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
4725 __ Add(out_reg_hi, out_reg_hi, out_reg_lo);
4726 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
4727 __ Lsr(shift_left, in_reg_hi, shift_right);
4728 __ Add(out_reg_lo, out_reg_lo, shift_left);
Anton Kirilov6f644202017-02-27 18:29:45 +00004729 __ B(final_label);
Artem Serov02109dd2016-09-23 17:17:54 +01004730
4731 __ Bind(&shift_by_32_plus_shift_right); // Shift by 32+shift_right.
4732 // out_reg_hi = (reg_hi >> shift_right) | (reg_lo << shift_left).
4733 // out_reg_lo = (reg_lo >> shift_right) | (reg_hi << shift_left).
4734 __ Lsr(out_reg_hi, in_reg_hi, shift_right);
4735 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
4736 __ Add(out_reg_hi, out_reg_hi, out_reg_lo);
4737 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
4738 __ Lsl(shift_right, in_reg_hi, shift_left);
4739 __ Add(out_reg_lo, out_reg_lo, shift_right);
4740
Anton Kirilov6f644202017-02-27 18:29:45 +00004741 if (end.IsReferenced()) {
4742 __ Bind(&end);
4743 }
Artem Serov02109dd2016-09-23 17:17:54 +01004744 }
4745}
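
// Illustrative sketch, not part of the original file (helper name is
// hypothetical): a 64-bit rotate right by rot >= 32 is the same as swapping
// the two halves and rotating by rot - 32, which is what the constant path
// above does before combining each half from an LSR/LSL pair.
static uint64_t RotateRight64Sketch(uint64_t value, uint32_t rot) {
  rot &= 63u;
  uint32_t lo = static_cast<uint32_t>(value);
  uint32_t hi = static_cast<uint32_t>(value >> 32);
  if (rot >= 32u) {
    std::swap(lo, hi);  // 'pre-rotate' by a whole word
    rot -= 32u;
  }
  if (rot != 0u) {
    uint32_t new_hi = (hi >> rot) | (lo << (32u - rot));
    uint32_t new_lo = (lo >> rot) | (hi << (32u - rot));
    hi = new_hi;
    lo = new_lo;
  }
  return (static_cast<uint64_t>(hi) << 32) | lo;
}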
4746
4747void LocationsBuilderARMVIXL::VisitRor(HRor* ror) {
4748 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004749 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Artem Serov02109dd2016-09-23 17:17:54 +01004750 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004751 case DataType::Type::kInt32: {
Artem Serov02109dd2016-09-23 17:17:54 +01004752 locations->SetInAt(0, Location::RequiresRegister());
4753 locations->SetInAt(1, Location::RegisterOrConstant(ror->InputAt(1)));
4754 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4755 break;
4756 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004757 case DataType::Type::kInt64: {
Artem Serov02109dd2016-09-23 17:17:54 +01004758 locations->SetInAt(0, Location::RequiresRegister());
4759 if (ror->InputAt(1)->IsConstant()) {
4760 locations->SetInAt(1, Location::ConstantLocation(ror->InputAt(1)->AsConstant()));
4761 } else {
4762 locations->SetInAt(1, Location::RequiresRegister());
4763 locations->AddTemp(Location::RequiresRegister());
4764 locations->AddTemp(Location::RequiresRegister());
4765 }
4766 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
4767 break;
4768 }
4769 default:
4770 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4771 }
4772}
4773
4774void InstructionCodeGeneratorARMVIXL::VisitRor(HRor* ror) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004775 DataType::Type type = ror->GetResultType();
Artem Serov02109dd2016-09-23 17:17:54 +01004776 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004777 case DataType::Type::kInt32: {
Artem Serov02109dd2016-09-23 17:17:54 +01004778 HandleIntegerRotate(ror);
4779 break;
4780 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004781 case DataType::Type::kInt64: {
Artem Serov02109dd2016-09-23 17:17:54 +01004782 HandleLongRotate(ror);
4783 break;
4784 }
4785 default:
4786 LOG(FATAL) << "Unexpected operation type " << type;
4787 UNREACHABLE();
4788 }
4789}
4790
Artem Serov02d37832016-10-25 15:25:33 +01004791void LocationsBuilderARMVIXL::HandleShift(HBinaryOperation* op) {
4792 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4793
4794 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004795 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01004796
4797 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004798 case DataType::Type::kInt32: {
Artem Serov02d37832016-10-25 15:25:33 +01004799 locations->SetInAt(0, Location::RequiresRegister());
4800 if (op->InputAt(1)->IsConstant()) {
4801 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
4802 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4803 } else {
4804 locations->SetInAt(1, Location::RequiresRegister());
4805 // Make the output overlap, as it will be used to hold the masked
4806 // second input.
4807 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
4808 }
4809 break;
4810 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004811 case DataType::Type::kInt64: {
Artem Serov02d37832016-10-25 15:25:33 +01004812 locations->SetInAt(0, Location::RequiresRegister());
4813 if (op->InputAt(1)->IsConstant()) {
4814 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
4815 // For simplicity, use kOutputOverlap even though we only require that low registers
4816 // don't clash with high registers, which the register allocator currently guarantees.
4817 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
4818 } else {
4819 locations->SetInAt(1, Location::RequiresRegister());
4820 locations->AddTemp(Location::RequiresRegister());
4821 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
4822 }
4823 break;
4824 }
4825 default:
4826 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
4827 }
4828}
4829
4830void InstructionCodeGeneratorARMVIXL::HandleShift(HBinaryOperation* op) {
4831 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4832
4833 LocationSummary* locations = op->GetLocations();
4834 Location out = locations->Out();
4835 Location first = locations->InAt(0);
4836 Location second = locations->InAt(1);
4837
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004838 DataType::Type type = op->GetResultType();
Artem Serov02d37832016-10-25 15:25:33 +01004839 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004840 case DataType::Type::kInt32: {
Artem Serov02d37832016-10-25 15:25:33 +01004841 vixl32::Register out_reg = OutputRegister(op);
4842 vixl32::Register first_reg = InputRegisterAt(op, 0);
4843 if (second.IsRegister()) {
4844 vixl32::Register second_reg = RegisterFrom(second);
4845 // ARM doesn't mask the shift count, so we need to do it ourselves.
4846 __ And(out_reg, second_reg, kMaxIntShiftDistance);
4847 if (op->IsShl()) {
4848 __ Lsl(out_reg, first_reg, out_reg);
4849 } else if (op->IsShr()) {
4850 __ Asr(out_reg, first_reg, out_reg);
4851 } else {
4852 __ Lsr(out_reg, first_reg, out_reg);
4853 }
4854 } else {
Anton Kirilov644032c2016-12-06 17:51:43 +00004855 int32_t cst = Int32ConstantFrom(second);
Artem Serov02d37832016-10-25 15:25:33 +01004856 uint32_t shift_value = cst & kMaxIntShiftDistance;
4857 if (shift_value == 0) { // ARM does not support shifting with a 0 immediate.
4858 __ Mov(out_reg, first_reg);
4859 } else if (op->IsShl()) {
4860 __ Lsl(out_reg, first_reg, shift_value);
4861 } else if (op->IsShr()) {
4862 __ Asr(out_reg, first_reg, shift_value);
4863 } else {
4864 __ Lsr(out_reg, first_reg, shift_value);
4865 }
4866 }
4867 break;
4868 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004869 case DataType::Type::kInt64: {
Artem Serov02d37832016-10-25 15:25:33 +01004870 vixl32::Register o_h = HighRegisterFrom(out);
4871 vixl32::Register o_l = LowRegisterFrom(out);
4872
4873 vixl32::Register high = HighRegisterFrom(first);
4874 vixl32::Register low = LowRegisterFrom(first);
4875
4876 if (second.IsRegister()) {
4877 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
4878
4879 vixl32::Register second_reg = RegisterFrom(second);
4880
4881 if (op->IsShl()) {
4882 __ And(o_l, second_reg, kMaxLongShiftDistance);
4883 // Shift the high part
4884 __ Lsl(o_h, high, o_l);
4885 // Shift the low part and `or` what overflowed onto the high part
Scott Wakelingb77051e2016-11-21 19:46:00 +00004886 __ Rsb(temp, o_l, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01004887 __ Lsr(temp, low, temp);
4888 __ Orr(o_h, o_h, temp);
4889 // If the shift is > 32 bits, override the high part
Scott Wakelingb77051e2016-11-21 19:46:00 +00004890 __ Subs(temp, o_l, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01004891 {
Artem Serov0fb37192016-12-06 18:13:40 +00004892 ExactAssemblyScope guard(GetVIXLAssembler(),
4893 2 * vixl32::kMaxInstructionSizeInBytes,
4894 CodeBufferCheckScope::kMaximumSize);
Artem Serov02d37832016-10-25 15:25:33 +01004895 __ it(pl);
4896 __ lsl(pl, o_h, low, temp);
4897 }
4898 // Shift the low part
4899 __ Lsl(o_l, low, o_l);
4900 } else if (op->IsShr()) {
4901 __ And(o_h, second_reg, kMaxLongShiftDistance);
4902 // Shift the low part
4903 __ Lsr(o_l, low, o_h);
4904 // Shift the high part and `or` what underflowed onto the low part
Scott Wakelingb77051e2016-11-21 19:46:00 +00004905 __ Rsb(temp, o_h, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01004906 __ Lsl(temp, high, temp);
4907 __ Orr(o_l, o_l, temp);
4908 // If the shift is > 32 bits, override the low part
Scott Wakelingb77051e2016-11-21 19:46:00 +00004909 __ Subs(temp, o_h, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01004910 {
Artem Serov0fb37192016-12-06 18:13:40 +00004911 ExactAssemblyScope guard(GetVIXLAssembler(),
4912 2 * vixl32::kMaxInstructionSizeInBytes,
4913 CodeBufferCheckScope::kMaximumSize);
Artem Serov02d37832016-10-25 15:25:33 +01004914 __ it(pl);
4915 __ asr(pl, o_l, high, temp);
4916 }
4917 // Shift the high part
4918 __ Asr(o_h, high, o_h);
4919 } else {
4920 __ And(o_h, second_reg, kMaxLongShiftDistance);
4921 // Same as Shr, except we use `Lsr`s and not `Asr`s.
4922 __ Lsr(o_l, low, o_h);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004923 __ Rsb(temp, o_h, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01004924 __ Lsl(temp, high, temp);
4925 __ Orr(o_l, o_l, temp);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004926 __ Subs(temp, o_h, Operand::From(kArmBitsPerWord));
Artem Serov02d37832016-10-25 15:25:33 +01004927 {
Artem Serov0fb37192016-12-06 18:13:40 +00004928 ExactAssemblyScope guard(GetVIXLAssembler(),
4929 2 * vixl32::kMaxInstructionSizeInBytes,
4930 CodeBufferCheckScope::kMaximumSize);
Artem Serov02d37832016-10-25 15:25:33 +01004931 __ it(pl);
4932 __ lsr(pl, o_l, high, temp);
4933 }
4934 __ Lsr(o_h, high, o_h);
4935 }
4936 } else {
4937 // Register allocator doesn't create partial overlap.
4938 DCHECK(!o_l.Is(high));
4939 DCHECK(!o_h.Is(low));
Anton Kirilov644032c2016-12-06 17:51:43 +00004940 int32_t cst = Int32ConstantFrom(second);
Artem Serov02d37832016-10-25 15:25:33 +01004941 uint32_t shift_value = cst & kMaxLongShiftDistance;
4942 if (shift_value > 32) {
4943 if (op->IsShl()) {
4944 __ Lsl(o_h, low, shift_value - 32);
4945 __ Mov(o_l, 0);
4946 } else if (op->IsShr()) {
4947 __ Asr(o_l, high, shift_value - 32);
4948 __ Asr(o_h, high, 31);
4949 } else {
4950 __ Lsr(o_l, high, shift_value - 32);
4951 __ Mov(o_h, 0);
4952 }
4953 } else if (shift_value == 32) {
4954 if (op->IsShl()) {
4955 __ Mov(o_h, low);
4956 __ Mov(o_l, 0);
4957 } else if (op->IsShr()) {
4958 __ Mov(o_l, high);
4959 __ Asr(o_h, high, 31);
4960 } else {
4961 __ Mov(o_l, high);
4962 __ Mov(o_h, 0);
4963 }
4964 } else if (shift_value == 1) {
4965 if (op->IsShl()) {
4966 __ Lsls(o_l, low, 1);
4967 __ Adc(o_h, high, high);
4968 } else if (op->IsShr()) {
4969 __ Asrs(o_h, high, 1);
4970 __ Rrx(o_l, low);
4971 } else {
4972 __ Lsrs(o_h, high, 1);
4973 __ Rrx(o_l, low);
4974 }
Nicolas Geoffray9b195cc2019-04-02 08:29:00 +01004975 } else if (shift_value == 0) {
4976 __ Mov(o_l, low);
4977 __ Mov(o_h, high);
Artem Serov02d37832016-10-25 15:25:33 +01004978 } else {
Nicolas Geoffray9b195cc2019-04-02 08:29:00 +01004979 DCHECK(0 < shift_value && shift_value < 32) << shift_value;
Artem Serov02d37832016-10-25 15:25:33 +01004980 if (op->IsShl()) {
4981 __ Lsl(o_h, high, shift_value);
4982 __ Orr(o_h, o_h, Operand(low, ShiftType::LSR, 32 - shift_value));
4983 __ Lsl(o_l, low, shift_value);
4984 } else if (op->IsShr()) {
4985 __ Lsr(o_l, low, shift_value);
4986 __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value));
4987 __ Asr(o_h, high, shift_value);
4988 } else {
4989 __ Lsr(o_l, low, shift_value);
4990 __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value));
4991 __ Lsr(o_h, high, shift_value);
4992 }
4993 }
4994 }
4995 break;
4996 }
4997 default:
4998 LOG(FATAL) << "Unexpected operation type " << type;
4999 UNREACHABLE();
5000 }
5001}
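// Illustrative sketch (added; hypothetical names): for a constant distance 0 < n < 32,
// the long shifts above combine the two 32-bit halves exactly as these portable
// expressions do. Distances of 0, 32 and greater than 32 are special-cased in the code above.
struct LongHalvesSketch {
  uint32_t lo;
  uint32_t hi;
};

static inline LongHalvesSketch ShlLongSketch(LongHalvesSketch in, uint32_t n) {
  LongHalvesSketch out;
  out.hi = (in.hi << n) | (in.lo >> (32u - n));  // Bits moving up from the low word.
  out.lo = in.lo << n;
  return out;
}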
5002
5003void LocationsBuilderARMVIXL::VisitShl(HShl* shl) {
5004 HandleShift(shl);
5005}
5006
5007void InstructionCodeGeneratorARMVIXL::VisitShl(HShl* shl) {
5008 HandleShift(shl);
5009}
5010
5011void LocationsBuilderARMVIXL::VisitShr(HShr* shr) {
5012 HandleShift(shr);
5013}
5014
5015void InstructionCodeGeneratorARMVIXL::VisitShr(HShr* shr) {
5016 HandleShift(shr);
5017}
5018
5019void LocationsBuilderARMVIXL::VisitUShr(HUShr* ushr) {
5020 HandleShift(ushr);
5021}
5022
5023void InstructionCodeGeneratorARMVIXL::VisitUShr(HUShr* ushr) {
5024 HandleShift(ushr);
5025}
5026
5027void LocationsBuilderARMVIXL::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005028 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5029 instruction, LocationSummary::kCallOnMainOnly);
Alex Lightd109e302018-06-27 10:25:41 -07005030 InvokeRuntimeCallingConventionARMVIXL calling_convention;
5031 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
Artem Serov02d37832016-10-25 15:25:33 +01005032 locations->SetOut(LocationFrom(r0));
5033}
5034
5035void InstructionCodeGeneratorARMVIXL::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07005036 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
5037 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Andreas Gampe3db70682018-12-26 15:12:03 -08005038 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 11);
Artem Serov02d37832016-10-25 15:25:33 +01005039}
5040
5041void LocationsBuilderARMVIXL::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005042 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5043 instruction, LocationSummary::kCallOnMainOnly);
Artem Serov02d37832016-10-25 15:25:33 +01005044 InvokeRuntimeCallingConventionARMVIXL calling_convention;
Artem Serov02d37832016-10-25 15:25:33 +01005045 locations->SetOut(LocationFrom(r0));
Nicolas Geoffray8c7c4f12017-01-26 10:13:11 +00005046 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5047 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Artem Serov02d37832016-10-25 15:25:33 +01005048}
5049
5050void InstructionCodeGeneratorARMVIXL::VisitNewArray(HNewArray* instruction) {
Vladimir Markob5461632018-10-15 14:24:21 +01005051 // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
5052 QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
Artem Serov7b3672e2017-02-03 17:30:34 +00005053 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005054 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Artem Serov7b3672e2017-02-03 17:30:34 +00005055 DCHECK(!codegen_->IsLeafMethod());
Andreas Gampe3db70682018-12-26 15:12:03 -08005056 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 12);
Artem Serov02d37832016-10-25 15:25:33 +01005057}
5058
5059void LocationsBuilderARMVIXL::VisitParameterValue(HParameterValue* instruction) {
5060 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005061 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005062 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5063 if (location.IsStackSlot()) {
5064 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5065 } else if (location.IsDoubleStackSlot()) {
5066 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5067 }
5068 locations->SetOut(location);
5069}
5070
5071void InstructionCodeGeneratorARMVIXL::VisitParameterValue(
5072 HParameterValue* instruction ATTRIBUTE_UNUSED) {
5073 // Nothing to do, the parameter is already at its location.
5074}
5075
5076void LocationsBuilderARMVIXL::VisitCurrentMethod(HCurrentMethod* instruction) {
5077 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005078 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005079 locations->SetOut(LocationFrom(kMethodRegister));
5080}
5081
5082void InstructionCodeGeneratorARMVIXL::VisitCurrentMethod(
5083 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5084 // Nothing to do, the method is already at its location.
5085}
5086
5087void LocationsBuilderARMVIXL::VisitNot(HNot* not_) {
5088 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005089 new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005090 locations->SetInAt(0, Location::RequiresRegister());
5091 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5092}
5093
5094void InstructionCodeGeneratorARMVIXL::VisitNot(HNot* not_) {
5095 LocationSummary* locations = not_->GetLocations();
5096 Location out = locations->Out();
5097 Location in = locations->InAt(0);
5098 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005099 case DataType::Type::kInt32:
Artem Serov02d37832016-10-25 15:25:33 +01005100 __ Mvn(OutputRegister(not_), InputRegisterAt(not_, 0));
5101 break;
5102
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005103 case DataType::Type::kInt64:
Artem Serov02d37832016-10-25 15:25:33 +01005104 __ Mvn(LowRegisterFrom(out), LowRegisterFrom(in));
5105 __ Mvn(HighRegisterFrom(out), HighRegisterFrom(in));
5106 break;
5107
5108 default:
5109 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
5110 }
5111}
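// Added note: for kInt64 the two MVNs above complement the value half by half, which is
// exact because ~x on a 64-bit quantity is simply (~high, ~low) on its 32-bit parts.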
5112
Scott Wakelingc34dba72016-10-03 10:14:44 +01005113void LocationsBuilderARMVIXL::VisitBooleanNot(HBooleanNot* bool_not) {
5114 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005115 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
Scott Wakelingc34dba72016-10-03 10:14:44 +01005116 locations->SetInAt(0, Location::RequiresRegister());
5117 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5118}
5119
5120void InstructionCodeGeneratorARMVIXL::VisitBooleanNot(HBooleanNot* bool_not) {
5121 __ Eor(OutputRegister(bool_not), InputRegister(bool_not), 1);
5122}
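// Added note (minimal sketch, assuming the kBool invariant that values are 0 or 1): the
// single EOR above implements logical negation, since 0 ^ 1 == 1 and 1 ^ 1 == 0.
static inline uint32_t BooleanNotSketch(uint32_t b) {
  return b ^ 1u;  // Hypothetical helper for illustration only.
}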
5123
Artem Serov02d37832016-10-25 15:25:33 +01005124void LocationsBuilderARMVIXL::VisitCompare(HCompare* compare) {
5125 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005126 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005127 switch (compare->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005128 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005129 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005130 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005131 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005132 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005133 case DataType::Type::kInt32:
5134 case DataType::Type::kInt64: {
Artem Serov02d37832016-10-25 15:25:33 +01005135 locations->SetInAt(0, Location::RequiresRegister());
5136 locations->SetInAt(1, Location::RequiresRegister());
5137 // Output overlaps because it is written before doing the low comparison.
5138 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5139 break;
5140 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005141 case DataType::Type::kFloat32:
5142 case DataType::Type::kFloat64: {
Artem Serov02d37832016-10-25 15:25:33 +01005143 locations->SetInAt(0, Location::RequiresFpuRegister());
5144 locations->SetInAt(1, ArithmeticZeroOrFpuRegister(compare->InputAt(1)));
5145 locations->SetOut(Location::RequiresRegister());
5146 break;
5147 }
5148 default:
5149 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
5150 }
5151}
5152
5153void InstructionCodeGeneratorARMVIXL::VisitCompare(HCompare* compare) {
5154 LocationSummary* locations = compare->GetLocations();
5155 vixl32::Register out = OutputRegister(compare);
5156 Location left = locations->InAt(0);
5157 Location right = locations->InAt(1);
5158
5159 vixl32::Label less, greater, done;
Anton Kirilov6f644202017-02-27 18:29:45 +00005160 vixl32::Label* final_label = codegen_->GetFinalLabel(compare, &done);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005161 DataType::Type type = compare->InputAt(0)->GetType();
Vladimir Marko33bff252017-11-01 14:35:42 +00005162 vixl32::Condition less_cond = vixl32::Condition::None();
Artem Serov02d37832016-10-25 15:25:33 +01005163 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005164 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005165 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005166 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005167 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005168 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005169 case DataType::Type::kInt32: {
Artem Serov02d37832016-10-25 15:25:33 +01005170 // Emit move to `out` before the `Cmp`, as `Mov` might affect the status flags.
5171 __ Mov(out, 0);
5172 __ Cmp(RegisterFrom(left), RegisterFrom(right)); // Signed compare.
5173 less_cond = lt;
5174 break;
5175 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005176 case DataType::Type::kInt64: {
Artem Serov02d37832016-10-25 15:25:33 +01005177 __ Cmp(HighRegisterFrom(left), HighRegisterFrom(right)); // Signed compare.
Andreas Gampe3db70682018-12-26 15:12:03 -08005178 __ B(lt, &less, /* is_far_target= */ false);
5179 __ B(gt, &greater, /* is_far_target= */ false);
Artem Serov02d37832016-10-25 15:25:33 +01005180 // Emit move to `out` before the last `Cmp`, as `Mov` might affect the status flags.
5181 __ Mov(out, 0);
5182 __ Cmp(LowRegisterFrom(left), LowRegisterFrom(right)); // Unsigned compare.
5183 less_cond = lo;
5184 break;
5185 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005186 case DataType::Type::kFloat32:
5187 case DataType::Type::kFloat64: {
Artem Serov02d37832016-10-25 15:25:33 +01005188 __ Mov(out, 0);
Donghui Bai426b49c2016-11-08 14:55:38 +08005189 GenerateVcmp(compare, codegen_);
Artem Serov02d37832016-10-25 15:25:33 +01005190 // To branch on the FP compare result we transfer FPSCR to APSR (encoded as PC in VMRS).
5191 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
5192 less_cond = ARMFPCondition(kCondLT, compare->IsGtBias());
5193 break;
5194 }
5195 default:
5196 LOG(FATAL) << "Unexpected compare type " << type;
5197 UNREACHABLE();
5198 }
5199
Andreas Gampe3db70682018-12-26 15:12:03 -08005200 __ B(eq, final_label, /* is_far_target= */ false);
5201 __ B(less_cond, &less, /* is_far_target= */ false);
Artem Serov02d37832016-10-25 15:25:33 +01005202
5203 __ Bind(&greater);
5204 __ Mov(out, 1);
Anton Kirilov6f644202017-02-27 18:29:45 +00005205 __ B(final_label);
Artem Serov02d37832016-10-25 15:25:33 +01005206
5207 __ Bind(&less);
5208 __ Mov(out, -1);
5209
Anton Kirilov6f644202017-02-27 18:29:45 +00005210 if (done.IsReferenced()) {
5211 __ Bind(&done);
5212 }
Artem Serov02d37832016-10-25 15:25:33 +01005213}
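// Illustrative sketch (added; hypothetical helper, NaN tested without <cmath>): the code
// above materializes the -1/0/1 result of HCompare. For floating point, the gt-bias flag
// selected via ARMFPCondition(kCondLT, IsGtBias()) decides what an unordered comparison
// (a NaN operand) yields, mirroring the cmpg/cmpl semantics of the original bytecode.
static inline int32_t FpCompareSketch(double lhs, double rhs, bool gt_bias) {
  if (lhs != lhs || rhs != rhs) {  // At least one operand is NaN: unordered.
    return gt_bias ? 1 : -1;
  }
  if (lhs == rhs) {
    return 0;
  }
  return lhs < rhs ? -1 : 1;
}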
5214
5215void LocationsBuilderARMVIXL::VisitPhi(HPhi* instruction) {
5216 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005217 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005218 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
5219 locations->SetInAt(i, Location::Any());
5220 }
5221 locations->SetOut(Location::Any());
5222}
5223
5224void InstructionCodeGeneratorARMVIXL::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
5225 LOG(FATAL) << "Unreachable";
5226}
5227
5228void CodeGeneratorARMVIXL::GenerateMemoryBarrier(MemBarrierKind kind) {
5229 // TODO (ported from quick): revisit ARM barrier kinds.
5230 DmbOptions flavor = DmbOptions::ISH; // Quiet C++ warnings.
5231 switch (kind) {
5232 case MemBarrierKind::kAnyStore:
5233 case MemBarrierKind::kLoadAny:
5234 case MemBarrierKind::kAnyAny: {
5235 flavor = DmbOptions::ISH;
5236 break;
5237 }
5238 case MemBarrierKind::kStoreStore: {
5239 flavor = DmbOptions::ISHST;
5240 break;
5241 }
5242 default:
5243 LOG(FATAL) << "Unexpected memory barrier " << kind;
5244 }
5245 __ Dmb(flavor);
5246}
5247
5248void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicLoad(vixl32::Register addr,
5249 uint32_t offset,
5250 vixl32::Register out_lo,
5251 vixl32::Register out_hi) {
5252 UseScratchRegisterScope temps(GetVIXLAssembler());
5253 if (offset != 0) {
5254 vixl32::Register temp = temps.Acquire();
5255 __ Add(temp, addr, offset);
5256 addr = temp;
5257 }
Scott Wakelingb77051e2016-11-21 19:46:00 +00005258 __ Ldrexd(out_lo, out_hi, MemOperand(addr));
Artem Serov02d37832016-10-25 15:25:33 +01005259}
5260
5261void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicStore(vixl32::Register addr,
5262 uint32_t offset,
5263 vixl32::Register value_lo,
5264 vixl32::Register value_hi,
5265 vixl32::Register temp1,
5266 vixl32::Register temp2,
5267 HInstruction* instruction) {
5268 UseScratchRegisterScope temps(GetVIXLAssembler());
5269 vixl32::Label fail;
5270 if (offset != 0) {
5271 vixl32::Register temp = temps.Acquire();
5272 __ Add(temp, addr, offset);
5273 addr = temp;
5274 }
5275 __ Bind(&fail);
Alexandre Rames374ddf32016-11-04 10:40:49 +00005276 {
5277 // Ensure the pc position is recorded immediately after the `ldrexd` instruction.
Artem Serov0fb37192016-12-06 18:13:40 +00005278 ExactAssemblyScope aas(GetVIXLAssembler(),
5279 vixl32::kMaxInstructionSizeInBytes,
5280 CodeBufferCheckScope::kMaximumSize);
Alexandre Rames374ddf32016-11-04 10:40:49 +00005281 // We need a load followed by a store. (The address used in a STREX instruction must
5282 // be the same as the address in the most recently executed LDREX instruction.)
5283 __ ldrexd(temp1, temp2, MemOperand(addr));
5284 codegen_->MaybeRecordImplicitNullCheck(instruction);
5285 }
Scott Wakelingb77051e2016-11-21 19:46:00 +00005286 __ Strexd(temp1, value_lo, value_hi, MemOperand(addr));
xueliang.zhongf51bc622016-11-04 09:23:32 +00005287 __ CompareAndBranchIfNonZero(temp1, &fail);
Artem Serov02d37832016-10-25 15:25:33 +01005288}
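// Added note (pseudo-code sketch; LoadExclusive64/StoreExclusive64 are hypothetical names
// standing in for ldrexd/strexd): GenerateWideAtomicStore above emits the classic
// exclusive-access retry loop for an atomic 64-bit store on ARMv7:
//
//   do {
//     (void) LoadExclusive64(addr);              // ldrexd: claim the reservation.
//   } while (!StoreExclusive64(addr, lo, hi));   // strexd: fails if the reservation was
//                                                // lost; retry until it succeeds.
//
// The leading load is required because STREXD only succeeds when the address matches the
// most recent LDREXD on the same core, which is what makes the paired store atomic.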
Artem Serov02109dd2016-09-23 17:17:54 +01005289
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005290void LocationsBuilderARMVIXL::HandleFieldSet(
5291 HInstruction* instruction, const FieldInfo& field_info) {
5292 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5293
5294 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005295 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005296 locations->SetInAt(0, Location::RequiresRegister());
5297
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005298 DataType::Type field_type = field_info.GetFieldType();
5299 if (DataType::IsFloatingPointType(field_type)) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005300 locations->SetInAt(1, Location::RequiresFpuRegister());
5301 } else {
5302 locations->SetInAt(1, Location::RequiresRegister());
5303 }
5304
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005305 bool is_wide = field_type == DataType::Type::kInt64 || field_type == DataType::Type::kFloat64;
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005306 bool generate_volatile = field_info.IsVolatile()
5307 && is_wide
5308 && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
5309 bool needs_write_barrier =
5310 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
5311 // Temporary registers for the write barrier.
5312 // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
5313 if (needs_write_barrier) {
5314 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
5315 locations->AddTemp(Location::RequiresRegister());
5316 } else if (generate_volatile) {
5317 // The ARM encoding has some additional constraints for ldrexd/strexd:
5318 // - registers need to be consecutive
5319 // - the first register should be even but not R14.
5320 // We don't test for ARM yet, and the assertion makes sure that we
5321 // revisit this if we ever enable ARM encoding.
5322 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
5323
5324 locations->AddTemp(Location::RequiresRegister());
5325 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005326 if (field_type == DataType::Type::kFloat64) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005327 // For doubles we need two more registers to copy the value.
5328 locations->AddTemp(LocationFrom(r2));
5329 locations->AddTemp(LocationFrom(r3));
5330 }
5331 }
5332}
5333
5334void InstructionCodeGeneratorARMVIXL::HandleFieldSet(HInstruction* instruction,
5335 const FieldInfo& field_info,
5336 bool value_can_be_null) {
5337 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5338
5339 LocationSummary* locations = instruction->GetLocations();
5340 vixl32::Register base = InputRegisterAt(instruction, 0);
5341 Location value = locations->InAt(1);
5342
5343 bool is_volatile = field_info.IsVolatile();
5344 bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005345 DataType::Type field_type = field_info.GetFieldType();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005346 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
5347 bool needs_write_barrier =
5348 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
5349
5350 if (is_volatile) {
5351 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
5352 }
5353
5354 switch (field_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005355 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005356 case DataType::Type::kUint8:
5357 case DataType::Type::kInt8:
5358 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005359 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005360 case DataType::Type::kInt32: {
5361 StoreOperandType operand_type = GetStoreOperandType(field_type);
5362 GetAssembler()->StoreToOffset(operand_type, RegisterFrom(value), base, offset);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005363 break;
5364 }
5365
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005366 case DataType::Type::kReference: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005367 if (kPoisonHeapReferences && needs_write_barrier) {
5368 // Note that in the case where `value` is a null reference,
5369 // we do not enter this block, as a null reference does not
5370 // need poisoning.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005371 DCHECK_EQ(field_type, DataType::Type::kReference);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005372 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
5373 __ Mov(temp, RegisterFrom(value));
5374 GetAssembler()->PoisonHeapReference(temp);
5375 GetAssembler()->StoreToOffset(kStoreWord, temp, base, offset);
5376 } else {
5377 GetAssembler()->StoreToOffset(kStoreWord, RegisterFrom(value), base, offset);
5378 }
5379 break;
5380 }
5381
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005382 case DataType::Type::kInt64: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005383 if (is_volatile && !atomic_ldrd_strd) {
5384 GenerateWideAtomicStore(base,
5385 offset,
5386 LowRegisterFrom(value),
5387 HighRegisterFrom(value),
5388 RegisterFrom(locations->GetTemp(0)),
5389 RegisterFrom(locations->GetTemp(1)),
5390 instruction);
5391 } else {
5392 GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), base, offset);
5393 codegen_->MaybeRecordImplicitNullCheck(instruction);
5394 }
5395 break;
5396 }
5397
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005398 case DataType::Type::kFloat32: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005399 GetAssembler()->StoreSToOffset(SRegisterFrom(value), base, offset);
5400 break;
5401 }
5402
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005403 case DataType::Type::kFloat64: {
Scott Wakelingc34dba72016-10-03 10:14:44 +01005404 vixl32::DRegister value_reg = DRegisterFrom(value);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005405 if (is_volatile && !atomic_ldrd_strd) {
5406 vixl32::Register value_reg_lo = RegisterFrom(locations->GetTemp(0));
5407 vixl32::Register value_reg_hi = RegisterFrom(locations->GetTemp(1));
5408
5409 __ Vmov(value_reg_lo, value_reg_hi, value_reg);
5410
5411 GenerateWideAtomicStore(base,
5412 offset,
5413 value_reg_lo,
5414 value_reg_hi,
5415 RegisterFrom(locations->GetTemp(2)),
5416 RegisterFrom(locations->GetTemp(3)),
5417 instruction);
5418 } else {
5419 GetAssembler()->StoreDToOffset(value_reg, base, offset);
5420 codegen_->MaybeRecordImplicitNullCheck(instruction);
5421 }
5422 break;
5423 }
5424
Aart Bik66c158e2018-01-31 12:55:04 -08005425 case DataType::Type::kUint32:
5426 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005427 case DataType::Type::kVoid:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005428 LOG(FATAL) << "Unreachable type " << field_type;
5429 UNREACHABLE();
5430 }
5431
5432 // Longs and doubles are handled in the switch.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005433 if (field_type != DataType::Type::kInt64 && field_type != DataType::Type::kFloat64) {
Alexandre Rames374ddf32016-11-04 10:40:49 +00005434 // TODO(VIXL): Here and for other calls to `MaybeRecordImplicitNullCheck` in this method, we
5435 // should use a scope and the assembler to emit the store instruction to guarantee that we
5436 // record the pc at the correct position. But the `Assembler` does not automatically handle
5437 // unencodable offsets. Practically, everything is fine because the helper and VIXL, at the time
5438 // of writing, do generate the store instruction last.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005439 codegen_->MaybeRecordImplicitNullCheck(instruction);
5440 }
5441
5442 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
5443 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
5444 vixl32::Register card = RegisterFrom(locations->GetTemp(1));
5445 codegen_->MarkGCCard(temp, card, base, RegisterFrom(value), value_can_be_null);
5446 }
5447
5448 if (is_volatile) {
5449 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
5450 }
5451}
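// Added note: for a volatile field write, HandleFieldSet above brackets the store with two
// barriers, the usual shape for Java volatile stores on ARMv7 (both kinds currently lower
// to `dmb ish` in GenerateMemoryBarrier):
//
//   dmb ish   ; MemBarrierKind::kAnyStore - earlier accesses complete before the store
//   str/strd  ; the field write itself (or the ldrexd/strexd loop for wide volatiles)
//   dmb ish   ; MemBarrierKind::kAnyAny   - the store completes before later accesses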
5452
Artem Serov02d37832016-10-25 15:25:33 +01005453void LocationsBuilderARMVIXL::HandleFieldGet(HInstruction* instruction,
5454 const FieldInfo& field_info) {
5455 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
5456
5457 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005458 kEmitCompilerReadBarrier && (field_info.GetFieldType() == DataType::Type::kReference);
Artem Serov02d37832016-10-25 15:25:33 +01005459 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005460 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5461 object_field_get_with_read_barrier
5462 ? LocationSummary::kCallOnSlowPath
5463 : LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01005464 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
5465 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5466 }
5467 locations->SetInAt(0, Location::RequiresRegister());
5468
5469 bool volatile_for_double = field_info.IsVolatile()
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005470 && (field_info.GetFieldType() == DataType::Type::kFloat64)
Artem Serov02d37832016-10-25 15:25:33 +01005471 && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
5472 // The output overlaps in the case of a volatile long: we don't want the
5473 // code generated by GenerateWideAtomicLoad to overwrite the
5474 // object's location. Likewise, in the case of an object field get
5475 // with read barriers enabled, we do not want the load to overwrite
5476 // the object's location, as we need it to emit the read barrier.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005477 bool overlap =
5478 (field_info.IsVolatile() && (field_info.GetFieldType() == DataType::Type::kInt64)) ||
Artem Serov02d37832016-10-25 15:25:33 +01005479 object_field_get_with_read_barrier;
5480
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005481 if (DataType::IsFloatingPointType(instruction->GetType())) {
Artem Serov02d37832016-10-25 15:25:33 +01005482 locations->SetOut(Location::RequiresFpuRegister());
5483 } else {
5484 locations->SetOut(Location::RequiresRegister(),
5485 (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
5486 }
5487 if (volatile_for_double) {
5488 // The ARM encoding has some additional constraints for ldrexd/strexd:
5489 // - registers need to be consecutive
5490 // - the first register should be even but not R14.
5491 // We don't test for ARM yet, and the assertion makes sure that we
5492 // revisit this if we ever enable ARM encoding.
5493 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
5494 locations->AddTemp(Location::RequiresRegister());
5495 locations->AddTemp(Location::RequiresRegister());
5496 } else if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko008e09f32018-08-06 15:42:43 +01005497 // We need a temporary register for the read barrier load in
5498 // CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier()
5499 // only if the offset is too big.
5500 if (field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01005501 locations->AddTemp(Location::RequiresRegister());
5502 }
Artem Serov02d37832016-10-25 15:25:33 +01005503 }
5504}
5505
5506Location LocationsBuilderARMVIXL::ArithmeticZeroOrFpuRegister(HInstruction* input) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005507 DCHECK(DataType::IsFloatingPointType(input->GetType())) << input->GetType();
Artem Serov02d37832016-10-25 15:25:33 +01005508 if ((input->IsFloatConstant() && (input->AsFloatConstant()->IsArithmeticZero())) ||
5509 (input->IsDoubleConstant() && (input->AsDoubleConstant()->IsArithmeticZero()))) {
5510 return Location::ConstantLocation(input->AsConstant());
5511 } else {
5512 return Location::RequiresFpuRegister();
5513 }
5514}
5515
Artem Serov02109dd2016-09-23 17:17:54 +01005516Location LocationsBuilderARMVIXL::ArmEncodableConstantOrRegister(HInstruction* constant,
5517 Opcode opcode) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005518 DCHECK(!DataType::IsFloatingPointType(constant->GetType()));
Artem Serov02109dd2016-09-23 17:17:54 +01005519 if (constant->IsConstant() &&
5520 CanEncodeConstantAsImmediate(constant->AsConstant(), opcode)) {
5521 return Location::ConstantLocation(constant->AsConstant());
5522 }
5523 return Location::RequiresRegister();
5524}
5525
Vladimir Markof0a6a1d2018-01-08 14:23:56 +00005526static bool CanEncode32BitConstantAsImmediate(
5527 CodeGeneratorARMVIXL* codegen,
5528 uint32_t value,
5529 Opcode opcode,
5530 vixl32::FlagsUpdate flags_update = vixl32::FlagsUpdate::DontCare) {
5531 ArmVIXLAssembler* assembler = codegen->GetAssembler();
5532 if (assembler->ShifterOperandCanHold(opcode, value, flags_update)) {
Artem Serov02109dd2016-09-23 17:17:54 +01005533 return true;
5534 }
5535 Opcode neg_opcode = kNoOperand;
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00005536 uint32_t neg_value = 0;
Artem Serov02109dd2016-09-23 17:17:54 +01005537 switch (opcode) {
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00005538 case AND: neg_opcode = BIC; neg_value = ~value; break;
5539 case ORR: neg_opcode = ORN; neg_value = ~value; break;
5540 case ADD: neg_opcode = SUB; neg_value = -value; break;
5541 case ADC: neg_opcode = SBC; neg_value = ~value; break;
5542 case SUB: neg_opcode = ADD; neg_value = -value; break;
5543 case SBC: neg_opcode = ADC; neg_value = ~value; break;
5544 case MOV: neg_opcode = MVN; neg_value = ~value; break;
Artem Serov02109dd2016-09-23 17:17:54 +01005545 default:
5546 return false;
5547 }
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00005548
Vladimir Markof0a6a1d2018-01-08 14:23:56 +00005549 if (assembler->ShifterOperandCanHold(neg_opcode, neg_value, flags_update)) {
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00005550 return true;
5551 }
5552
5553 return opcode == AND && IsPowerOfTwo(value + 1);
Artem Serov02109dd2016-09-23 17:17:54 +01005554}
5555
Vladimir Markof0a6a1d2018-01-08 14:23:56 +00005556bool LocationsBuilderARMVIXL::CanEncodeConstantAsImmediate(HConstant* input_cst, Opcode opcode) {
5557 uint64_t value = static_cast<uint64_t>(Int64FromConstant(input_cst));
5558 if (DataType::Is64BitType(input_cst->GetType())) {
5559 Opcode high_opcode = opcode;
5560 vixl32::FlagsUpdate low_flags_update = vixl32::FlagsUpdate::DontCare;
5561 switch (opcode) {
5562 case SUB:
5563 // Flip the operation to an ADD.
5564 value = -value;
5565 opcode = ADD;
5566 FALLTHROUGH_INTENDED;
5567 case ADD:
5568 if (Low32Bits(value) == 0u) {
5569 return CanEncode32BitConstantAsImmediate(codegen_, High32Bits(value), opcode);
5570 }
5571 high_opcode = ADC;
5572 low_flags_update = vixl32::FlagsUpdate::SetFlags;
5573 break;
5574 default:
5575 break;
5576 }
5577 return CanEncode32BitConstantAsImmediate(codegen_, High32Bits(value), high_opcode) &&
5578 CanEncode32BitConstantAsImmediate(codegen_, Low32Bits(value), opcode, low_flags_update);
5579 } else {
5580 return CanEncode32BitConstantAsImmediate(codegen_, Low32Bits(value), opcode);
5581 }
5582}
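// Added example: the negated-opcode fallback above is what lets a constant with no Thumb-2
// modified-immediate encoding still stay out of a register. For instance 0xFFFFFF0F cannot
// be encoded as an AND immediate, but its complement 0x000000F0 can, so the AND can later
// be emitted as `bic rd, rn, #0xF0`; the same idea pairs ADD with SUB, ORR with ORN and
// MOV with MVN. The final `opcode == AND && IsPowerOfTwo(value + 1)` case accepts low-bit
// masks such as 0x0000FFFF, which the AND emitter can honor without a spare register
// (for example with a shift pair or a bit-field extract).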
5583
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005584void InstructionCodeGeneratorARMVIXL::HandleFieldGet(HInstruction* instruction,
5585 const FieldInfo& field_info) {
5586 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
5587
5588 LocationSummary* locations = instruction->GetLocations();
5589 vixl32::Register base = InputRegisterAt(instruction, 0);
5590 Location out = locations->Out();
5591 bool is_volatile = field_info.IsVolatile();
5592 bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
Vladimir Marko61b92282017-10-11 13:23:17 +01005593 DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
5594 DataType::Type load_type = instruction->GetType();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005595 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
5596
Vladimir Marko61b92282017-10-11 13:23:17 +01005597 switch (load_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005598 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005599 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005600 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005601 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005602 case DataType::Type::kInt16:
5603 case DataType::Type::kInt32: {
Vladimir Marko61b92282017-10-11 13:23:17 +01005604 LoadOperandType operand_type = GetLoadOperandType(load_type);
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005605 GetAssembler()->LoadFromOffset(operand_type, RegisterFrom(out), base, offset);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005606 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005607 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005608
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005609 case DataType::Type::kReference: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005610 // /* HeapReference<Object> */ out = *(base + offset)
5611 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Vladimir Markodcd117e2018-04-19 11:54:00 +01005612 Location maybe_temp = (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location();
Anton Kirilovedb2ac32016-11-30 15:14:10 +00005613 // Note that a potential implicit null check is handled in this
5614 // CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier call.
5615 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08005616 instruction, out, base, offset, maybe_temp, /* needs_null_check= */ true);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00005617 if (is_volatile) {
5618 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5619 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005620 } else {
5621 GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005622 codegen_->MaybeRecordImplicitNullCheck(instruction);
5623 if (is_volatile) {
5624 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5625 }
5626 // If read barriers are enabled, emit read barriers other than
5627 // Baker's using a slow path (and also unpoison the loaded
5628 // reference, if heap poisoning is enabled).
5629 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, locations->InAt(0), offset);
5630 }
5631 break;
5632 }
5633
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005634 case DataType::Type::kInt64:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005635 if (is_volatile && !atomic_ldrd_strd) {
5636 GenerateWideAtomicLoad(base, offset, LowRegisterFrom(out), HighRegisterFrom(out));
5637 } else {
5638 GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out), base, offset);
5639 }
5640 break;
5641
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005642 case DataType::Type::kFloat32:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005643 GetAssembler()->LoadSFromOffset(SRegisterFrom(out), base, offset);
5644 break;
5645
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005646 case DataType::Type::kFloat64: {
Scott Wakelingc34dba72016-10-03 10:14:44 +01005647 vixl32::DRegister out_dreg = DRegisterFrom(out);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005648 if (is_volatile && !atomic_ldrd_strd) {
5649 vixl32::Register lo = RegisterFrom(locations->GetTemp(0));
5650 vixl32::Register hi = RegisterFrom(locations->GetTemp(1));
5651 GenerateWideAtomicLoad(base, offset, lo, hi);
5652 // TODO(VIXL): Do we need to be immediately after the ldrexd instruction? If so we need a
5653 // scope.
5654 codegen_->MaybeRecordImplicitNullCheck(instruction);
5655 __ Vmov(out_dreg, lo, hi);
5656 } else {
5657 GetAssembler()->LoadDFromOffset(out_dreg, base, offset);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005658 codegen_->MaybeRecordImplicitNullCheck(instruction);
5659 }
5660 break;
5661 }
5662
Aart Bik66c158e2018-01-31 12:55:04 -08005663 case DataType::Type::kUint32:
5664 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005665 case DataType::Type::kVoid:
Vladimir Marko61b92282017-10-11 13:23:17 +01005666 LOG(FATAL) << "Unreachable type " << load_type;
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005667 UNREACHABLE();
5668 }
5669
Vladimir Marko61b92282017-10-11 13:23:17 +01005670 if (load_type == DataType::Type::kReference || load_type == DataType::Type::kFloat64) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005671 // Potential implicit null checks, in the case of reference or
5672 // double fields, are handled in the previous switch statement.
5673 } else {
5674 // Address cases other than reference and double that may require an implicit null check.
Alexandre Rames374ddf32016-11-04 10:40:49 +00005675 // TODO(VIXL): Here and for other calls to `MaybeRecordImplicitNullCheck` in this method, we
5676 // should use a scope and the assembler to emit the load instruction to guarantee that we
5677 // record the pc at the correct position. But the `Assembler` does not automatically handle
5678 // unencodable offsets. Practically, everything is fine because the helper and VIXL, at the time
5679 // of writing, do generate the load instruction last.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005680 codegen_->MaybeRecordImplicitNullCheck(instruction);
5681 }
5682
5683 if (is_volatile) {
Vladimir Marko61b92282017-10-11 13:23:17 +01005684 if (load_type == DataType::Type::kReference) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005685 // Memory barriers, in the case of references, are also handled
5686 // in the previous switch statement.
5687 } else {
5688 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5689 }
5690 }
5691}
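// Added note: the volatile-read counterpart of the store path is visible above - every
// volatile field load is followed by MemBarrierKind::kLoadAny (a `dmb ish`), giving the
// acquire ordering Java volatile reads need on ARMv7:
//
//   ldr/ldrd/ldrexd  ; the field read
//   dmb ish          ; kLoadAny - later accesses may not be hoisted above the load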
5692
5693void LocationsBuilderARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5694 HandleFieldSet(instruction, instruction->GetFieldInfo());
5695}
5696
5697void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5698 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
5699}
5700
5701void LocationsBuilderARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5702 HandleFieldGet(instruction, instruction->GetFieldInfo());
5703}
5704
5705void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5706 HandleFieldGet(instruction, instruction->GetFieldInfo());
5707}
5708
5709void LocationsBuilderARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5710 HandleFieldGet(instruction, instruction->GetFieldInfo());
5711}
5712
5713void InstructionCodeGeneratorARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5714 HandleFieldGet(instruction, instruction->GetFieldInfo());
5715}
5716
Scott Wakelingc34dba72016-10-03 10:14:44 +01005717void LocationsBuilderARMVIXL::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5718 HandleFieldSet(instruction, instruction->GetFieldInfo());
5719}
5720
5721void InstructionCodeGeneratorARMVIXL::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5722 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
5723}
5724
Vladimir Marko552a1342017-10-31 10:56:47 +00005725void LocationsBuilderARMVIXL::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5726 codegen_->CreateStringBuilderAppendLocations(instruction, LocationFrom(r0));
5727}
5728
5729void InstructionCodeGeneratorARMVIXL::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5730 __ Mov(r0, instruction->GetFormat()->GetValue());
5731 codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
5732}
5733
Artem Serovcfbe9132016-10-14 15:58:56 +01005734void LocationsBuilderARMVIXL::VisitUnresolvedInstanceFieldGet(
5735 HUnresolvedInstanceFieldGet* instruction) {
5736 FieldAccessCallingConventionARMVIXL calling_convention;
5737 codegen_->CreateUnresolvedFieldLocationSummary(
5738 instruction, instruction->GetFieldType(), calling_convention);
5739}
5740
5741void InstructionCodeGeneratorARMVIXL::VisitUnresolvedInstanceFieldGet(
5742 HUnresolvedInstanceFieldGet* instruction) {
5743 FieldAccessCallingConventionARMVIXL calling_convention;
5744 codegen_->GenerateUnresolvedFieldAccess(instruction,
5745 instruction->GetFieldType(),
5746 instruction->GetFieldIndex(),
5747 instruction->GetDexPc(),
5748 calling_convention);
5749}
5750
5751void LocationsBuilderARMVIXL::VisitUnresolvedInstanceFieldSet(
5752 HUnresolvedInstanceFieldSet* instruction) {
5753 FieldAccessCallingConventionARMVIXL calling_convention;
5754 codegen_->CreateUnresolvedFieldLocationSummary(
5755 instruction, instruction->GetFieldType(), calling_convention);
5756}
5757
5758void InstructionCodeGeneratorARMVIXL::VisitUnresolvedInstanceFieldSet(
5759 HUnresolvedInstanceFieldSet* instruction) {
5760 FieldAccessCallingConventionARMVIXL calling_convention;
5761 codegen_->GenerateUnresolvedFieldAccess(instruction,
5762 instruction->GetFieldType(),
5763 instruction->GetFieldIndex(),
5764 instruction->GetDexPc(),
5765 calling_convention);
5766}
5767
5768void LocationsBuilderARMVIXL::VisitUnresolvedStaticFieldGet(
5769 HUnresolvedStaticFieldGet* instruction) {
5770 FieldAccessCallingConventionARMVIXL calling_convention;
5771 codegen_->CreateUnresolvedFieldLocationSummary(
5772 instruction, instruction->GetFieldType(), calling_convention);
5773}
5774
5775void InstructionCodeGeneratorARMVIXL::VisitUnresolvedStaticFieldGet(
5776 HUnresolvedStaticFieldGet* instruction) {
5777 FieldAccessCallingConventionARMVIXL calling_convention;
5778 codegen_->GenerateUnresolvedFieldAccess(instruction,
5779 instruction->GetFieldType(),
5780 instruction->GetFieldIndex(),
5781 instruction->GetDexPc(),
5782 calling_convention);
5783}
5784
5785void LocationsBuilderARMVIXL::VisitUnresolvedStaticFieldSet(
5786 HUnresolvedStaticFieldSet* instruction) {
5787 FieldAccessCallingConventionARMVIXL calling_convention;
5788 codegen_->CreateUnresolvedFieldLocationSummary(
5789 instruction, instruction->GetFieldType(), calling_convention);
5790}
5791
5792void InstructionCodeGeneratorARMVIXL::VisitUnresolvedStaticFieldSet(
5793 HUnresolvedStaticFieldSet* instruction) {
5794 FieldAccessCallingConventionARMVIXL calling_convention;
5795 codegen_->GenerateUnresolvedFieldAccess(instruction,
5796 instruction->GetFieldType(),
5797 instruction->GetFieldIndex(),
5798 instruction->GetDexPc(),
5799 calling_convention);
5800}
5801
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005802void LocationsBuilderARMVIXL::VisitNullCheck(HNullCheck* instruction) {
Artem Serov657022c2016-11-23 14:19:38 +00005803 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005804 locations->SetInAt(0, Location::RequiresRegister());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005805}
5806
5807void CodeGeneratorARMVIXL::GenerateImplicitNullCheck(HNullCheck* instruction) {
5808 if (CanMoveNullCheckToUser(instruction)) {
5809 return;
5810 }
5811
5812 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames374ddf32016-11-04 10:40:49 +00005813 // Ensure the pc position is recorded immediately after the `ldr` instruction.
Artem Serov0fb37192016-12-06 18:13:40 +00005814 ExactAssemblyScope aas(GetVIXLAssembler(),
5815 vixl32::kMaxInstructionSizeInBytes,
5816 CodeBufferCheckScope::kMaximumSize);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005817 __ ldr(temps.Acquire(), MemOperand(InputRegisterAt(instruction, 0)));
5818 RecordPcInfo(instruction, instruction->GetDexPc());
5819}
5820
5821void CodeGeneratorARMVIXL::GenerateExplicitNullCheck(HNullCheck* instruction) {
5822 NullCheckSlowPathARMVIXL* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01005823 new (GetScopedAllocator()) NullCheckSlowPathARMVIXL(instruction);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005824 AddSlowPath(slow_path);
xueliang.zhongf51bc622016-11-04 09:23:32 +00005825 __ CompareAndBranchIfZero(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005826}
5827
5828void InstructionCodeGeneratorARMVIXL::VisitNullCheck(HNullCheck* instruction) {
5829 codegen_->GenerateNullCheck(instruction);
5830}
5831
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005832void CodeGeneratorARMVIXL::LoadFromShiftedRegOffset(DataType::Type type,
Scott Wakelingc34dba72016-10-03 10:14:44 +01005833 Location out_loc,
5834 vixl32::Register base,
5835 vixl32::Register reg_index,
5836 vixl32::Condition cond) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005837 uint32_t shift_count = DataType::SizeShift(type);
Scott Wakelingc34dba72016-10-03 10:14:44 +01005838 MemOperand mem_address(base, reg_index, vixl32::LSL, shift_count);
5839
5840 switch (type) {
Vladimir Marko61b92282017-10-11 13:23:17 +01005841 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005842 case DataType::Type::kUint8:
Vladimir Marko61b92282017-10-11 13:23:17 +01005843 __ Ldrb(cond, RegisterFrom(out_loc), mem_address);
5844 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005845 case DataType::Type::kInt8:
Scott Wakelingc34dba72016-10-03 10:14:44 +01005846 __ Ldrsb(cond, RegisterFrom(out_loc), mem_address);
5847 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005848 case DataType::Type::kUint16:
Scott Wakelingc34dba72016-10-03 10:14:44 +01005849 __ Ldrh(cond, RegisterFrom(out_loc), mem_address);
5850 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005851 case DataType::Type::kInt16:
5852 __ Ldrsh(cond, RegisterFrom(out_loc), mem_address);
5853 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005854 case DataType::Type::kReference:
5855 case DataType::Type::kInt32:
Scott Wakelingc34dba72016-10-03 10:14:44 +01005856 __ Ldr(cond, RegisterFrom(out_loc), mem_address);
5857 break;
5858 // T32 doesn't support LoadFromShiftedRegOffset mem address mode for these types.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005859 case DataType::Type::kInt64:
5860 case DataType::Type::kFloat32:
5861 case DataType::Type::kFloat64:
Scott Wakelingc34dba72016-10-03 10:14:44 +01005862 default:
5863 LOG(FATAL) << "Unreachable type " << type;
5864 UNREACHABLE();
5865 }
5866}
5867
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005868void CodeGeneratorARMVIXL::StoreToShiftedRegOffset(DataType::Type type,
Scott Wakelingc34dba72016-10-03 10:14:44 +01005869 Location loc,
5870 vixl32::Register base,
5871 vixl32::Register reg_index,
5872 vixl32::Condition cond) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005873 uint32_t shift_count = DataType::SizeShift(type);
Scott Wakelingc34dba72016-10-03 10:14:44 +01005874 MemOperand mem_address(base, reg_index, vixl32::LSL, shift_count);
5875
5876 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005877 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005878 case DataType::Type::kUint8:
5879 case DataType::Type::kInt8:
Scott Wakelingc34dba72016-10-03 10:14:44 +01005880 __ Strb(cond, RegisterFrom(loc), mem_address);
5881 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005882 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005883 case DataType::Type::kInt16:
Scott Wakelingc34dba72016-10-03 10:14:44 +01005884 __ Strh(cond, RegisterFrom(loc), mem_address);
5885 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005886 case DataType::Type::kReference:
5887 case DataType::Type::kInt32:
Scott Wakelingc34dba72016-10-03 10:14:44 +01005888 __ Str(cond, RegisterFrom(loc), mem_address);
5889 break;
5890 // T32 doesn't support StoreToShiftedRegOffset mem address mode for these types.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005891 case DataType::Type::kInt64:
5892 case DataType::Type::kFloat32:
5893 case DataType::Type::kFloat64:
Scott Wakelingc34dba72016-10-03 10:14:44 +01005894 default:
5895 LOG(FATAL) << "Unreachable type " << type;
5896 UNREACHABLE();
5897 }
5898}
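// Illustrative sketch (added; hypothetical helper): both helpers above address the element
// as base + (index << shift), where the shift is log2 of the element size, matching the
// LSL register-offset operand form used by the LDR/STR variants.
static inline uint8_t* ElementAddressSketch(uint8_t* base, uint32_t index, uint32_t size_shift) {
  // For kInt32, size_shift == 2, so this computes base + index * 4.
  return base + (static_cast<size_t>(index) << size_shift);
}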
5899
void LocationsBuilderARMVIXL::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    if (instruction->GetIndex()->IsConstant()) {
      // Array loads with constant index are treated as field loads.
      // We need a temporary register for the read barrier load in
      // CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier()
      // only if the offset is too big.
      uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
      uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
      offset += index << DataType::SizeShift(DataType::Type::kReference);
      if (offset >= kReferenceLoadMinFarOffset) {
        locations->AddTemp(Location::RequiresRegister());
      }
    } else {
      // We need a non-scratch temporary for the array data pointer in
      // CodeGeneratorARMVIXL::GenerateArrayLoadWithBakerReadBarrier().
      locations->AddTemp(Location::RequiresRegister());
    }
  } else if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
    // Also need a temporary for String compression feature.
    locations->AddTemp(Location::RequiresRegister());
  }
}

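// Code generation for HArrayGet. Constant indices are folded into the load offset, register
// indices use a shifted-register address; String.charAt() checks the compression flag and
// loads a byte or half-word, and reference loads may go through the read barrier machinery.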
void InstructionCodeGeneratorARMVIXL::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  vixl32::Register obj = InputRegisterAt(instruction, 0);
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  DataType::Type type = instruction->GetType();
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  HInstruction* array_instr = instruction->GetArray();
  bool has_intermediate_address = array_instr->IsIntermediateAddress();

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      vixl32::Register length;
      if (maybe_compressed_char_at) {
        length = RegisterFrom(locations->GetTemp(0));
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        GetAssembler()->LoadFromOffset(kLoadWord, length, obj, count_offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      if (index.IsConstant()) {
        int32_t const_index = Int32ConstantFrom(index);
        if (maybe_compressed_char_at) {
          vixl32::Label uncompressed_load, done;
          vixl32::Label* final_label = codegen_->GetFinalLabel(instruction, &done);
          __ Lsrs(length, length, 1u);  // LSRS has a 16-bit encoding, TST (immediate) does not.
          static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                        "Expecting 0=compressed, 1=uncompressed");
          __ B(cs, &uncompressed_load, /* is_far_target= */ false);
          GetAssembler()->LoadFromOffset(kLoadUnsignedByte,
                                         RegisterFrom(out_loc),
                                         obj,
                                         data_offset + const_index);
          __ B(final_label);
          __ Bind(&uncompressed_load);
          GetAssembler()->LoadFromOffset(GetLoadOperandType(DataType::Type::kUint16),
                                         RegisterFrom(out_loc),
                                         obj,
                                         data_offset + (const_index << 1));
          if (done.IsReferenced()) {
            __ Bind(&done);
          }
        } else {
          uint32_t full_offset = data_offset + (const_index << DataType::SizeShift(type));

          LoadOperandType load_type = GetLoadOperandType(type);
          GetAssembler()->LoadFromOffset(load_type, RegisterFrom(out_loc), obj, full_offset);
        }
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();

        if (has_intermediate_address) {
          // We do not need to compute the intermediate address from the array: the
          // input instruction has done it already. See the comment in
          // `TryExtractArrayAccessAddress()`.
          if (kIsDebugBuild) {
            HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
            DCHECK_EQ(Uint64ConstantFrom(tmp->GetOffset()), data_offset);
          }
          temp = obj;
        } else {
          __ Add(temp, obj, data_offset);
        }
        if (maybe_compressed_char_at) {
          vixl32::Label uncompressed_load, done;
          vixl32::Label* final_label = codegen_->GetFinalLabel(instruction, &done);
          __ Lsrs(length, length, 1u);  // LSRS has a 16-bit encoding, TST (immediate) does not.
          static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                        "Expecting 0=compressed, 1=uncompressed");
          __ B(cs, &uncompressed_load, /* is_far_target= */ false);
          __ Ldrb(RegisterFrom(out_loc), MemOperand(temp, RegisterFrom(index), vixl32::LSL, 0));
          __ B(final_label);
          __ Bind(&uncompressed_load);
          __ Ldrh(RegisterFrom(out_loc), MemOperand(temp, RegisterFrom(index), vixl32::LSL, 1));
          if (done.IsReferenced()) {
            __ Bind(&done);
          }
        } else {
          codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
        }
      }
      break;
    }

    case DataType::Type::kReference: {
      // The read barrier instrumentation of object ArrayGet
      // instructions does not support the HIntermediateAddress
      // instruction.
      DCHECK(!(has_intermediate_address && kEmitCompilerReadBarrier));

      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorARMVIXL::GenerateArrayLoadWithBakerReadBarrier call.
        DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
        if (index.IsConstant()) {
          // Array load with a constant index can be treated as a field load.
          Location maybe_temp =
              (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location();
          data_offset += Int32ConstantFrom(index) << DataType::SizeShift(type);
          codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          data_offset,
                                                          maybe_temp,
                                                          /* needs_null_check= */ false);
        } else {
          Location temp = locations->GetTemp(0);
          codegen_->GenerateArrayLoadWithBakerReadBarrier(
              out_loc, obj, data_offset, index, temp, /* needs_null_check= */ false);
        }
      } else {
        vixl32::Register out = OutputRegister(instruction);
        if (index.IsConstant()) {
          size_t offset =
              (Int32ConstantFrom(index) << TIMES_4) + data_offset;
          GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
          // TODO(VIXL): Here and for other calls to `MaybeRecordImplicitNullCheck` in this method,
          // we should use a scope and the assembler to emit the load instruction to guarantee that
          // we record the pc at the correct position. But the `Assembler` does not automatically
          // handle unencodable offsets. Practically, everything is fine because the helper and
          // VIXL, at the time of writing, do generate the store instruction last.
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          UseScratchRegisterScope temps(GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();

          if (has_intermediate_address) {
            // We do not need to compute the intermediate address from the array: the
            // input instruction has done it already. See the comment in
            // `TryExtractArrayAccessAddress()`.
            if (kIsDebugBuild) {
              HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
              DCHECK_EQ(Uint64ConstantFrom(tmp->GetOffset()), data_offset);
            }
            temp = obj;
          } else {
            __ Add(temp, obj, data_offset);
          }
          codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
          temps.Close();
          // TODO(VIXL): Use a scope to ensure that we record the pc position immediately after the
          // load instruction. Practically, everything is fine because the helper and VIXL, at the
          // time of writing, do generate the store instruction last.
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (index.IsConstant()) {
        size_t offset =
            (Int32ConstantFrom(index) << TIMES_8) + data_offset;
        GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out_loc), obj, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
        GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out_loc), temp, data_offset);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      vixl32::SRegister out = SRegisterFrom(out_loc);
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
        GetAssembler()->LoadSFromOffset(out, obj, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
        GetAssembler()->LoadSFromOffset(out, temp, data_offset);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_8) + data_offset;
        GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), obj, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
        GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), temp, data_offset);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else if (!maybe_compressed_char_at) {
    // TODO(VIXL): Use a scope to ensure we record the pc info immediately after
    // the preceding load instruction.
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}

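// Register constraints for HArraySet. Stores that may need a runtime type check can call a
// slow path; stores requiring a write barrier reserve two temps, also used for reference
// poisoning when heap poisoning is enabled.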
void LocationsBuilderARMVIXL::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  }
}

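// Code generation for HArraySet. Primitive stores are plain (possibly shifted-register)
// stores; reference stores may perform an inline assignability check against the array's
// component type, fall back to ArraySetSlowPath, poison the stored reference and mark the
// GC card.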
void InstructionCodeGeneratorARMVIXL::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  vixl32::Register array = InputRegisterAt(instruction, 0);
  Location index = locations->InAt(1);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  uint32_t data_offset =
      mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
  Location value_loc = locations->InAt(2);
  HInstruction* array_instr = instruction->GetArray();
  bool has_intermediate_address = array_instr->IsIntermediateAddress();

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (index.IsConstant()) {
        int32_t const_index = Int32ConstantFrom(index);
        uint32_t full_offset =
            data_offset + (const_index << DataType::SizeShift(value_type));
        StoreOperandType store_type = GetStoreOperandType(value_type);
        GetAssembler()->StoreToOffset(store_type, RegisterFrom(value_loc), array, full_offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();

        if (has_intermediate_address) {
          // We do not need to compute the intermediate address from the array: the
          // input instruction has done it already. See the comment in
          // `TryExtractArrayAccessAddress()`.
          if (kIsDebugBuild) {
            HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
            DCHECK_EQ(Uint64ConstantFrom(tmp->GetOffset()), data_offset);
          }
          temp = array;
        } else {
          __ Add(temp, array, data_offset);
        }
        codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
      }
      break;
    }

    case DataType::Type::kReference: {
      vixl32::Register value = RegisterFrom(value_loc);
      // TryExtractArrayAccessAddress optimization is never applied for non-primitive ArraySet.
      // See the comment in instruction_simplifier_shared.cc.
      DCHECK(!has_intermediate_address);

      if (instruction->InputAt(2)->IsNullConstant()) {
        // Just setting null.
        if (index.IsConstant()) {
          size_t offset =
              (Int32ConstantFrom(index) << TIMES_4) + data_offset;
          GetAssembler()->StoreToOffset(kStoreWord, value, array, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          UseScratchRegisterScope temps(GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();
          __ Add(temp, array, data_offset);
          codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
        }
        // TODO(VIXL): Use a scope to ensure we record the pc info immediately after the preceding
        // store instruction.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      Location temp1_loc = locations->GetTemp(0);
      vixl32::Register temp1 = RegisterFrom(temp1_loc);
      Location temp2_loc = locations->GetTemp(1);
      vixl32::Register temp2 = RegisterFrom(temp2_loc);
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      vixl32::Label done;
      vixl32::Label* final_label = codegen_->GetFinalLabel(instruction, &done);
      SlowPathCodeARMVIXL* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARMVIXL(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl32::Label non_zero;
          __ CompareAndBranchIfNonZero(value, &non_zero);
          if (index.IsConstant()) {
            size_t offset =
                (Int32ConstantFrom(index) << TIMES_4) + data_offset;
            GetAssembler()->StoreToOffset(kStoreWord, value, array, offset);
          } else {
            DCHECK(index.IsRegister()) << index;
            UseScratchRegisterScope temps(GetVIXLAssembler());
            vixl32::Register temp = temps.Acquire();
            __ Add(temp, array, data_offset);
            codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
          }
          // TODO(VIXL): Use a scope to ensure we record the pc info immediately after the preceding
          // store instruction.
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(final_label);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers. This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        {
          // Ensure we record the pc position immediately after the `ldr` instruction.
          ExactAssemblyScope aas(GetVIXLAssembler(),
                                 vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
          // /* HeapReference<Class> */ temp1 = array->klass_
          __ ldr(temp1, MemOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
        GetAssembler()->MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        GetAssembler()->LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        GetAssembler()->LoadFromOffset(kLoadWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.
        __ Cmp(temp1, temp2);

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          vixl32::Label do_put;
          __ B(eq, &do_put, /* is_far_target= */ false);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          GetAssembler()->MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          GetAssembler()->LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ B(ne, slow_path->GetEntryLabel());
        }
      }

      vixl32::Register source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(value_type, DataType::Type::kReference);
        __ Mov(temp1, value);
        GetAssembler()->PoisonHeapReference(temp1);
        source = temp1;
      }

      if (index.IsConstant()) {
        size_t offset =
            (Int32ConstantFrom(index) << TIMES_4) + data_offset;
        GetAssembler()->StoreToOffset(kStoreWord, source, array, offset);
      } else {
        DCHECK(index.IsRegister()) << index;

        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, array, data_offset);
        codegen_->StoreToShiftedRegOffset(value_type,
                                          LocationFrom(source),
                                          temp,
                                          RegisterFrom(index));
      }

      if (!may_need_runtime_call_for_type_check) {
        // TODO(VIXL): Ensure we record the pc position immediately after the preceding store
        // instruction.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(temp1, temp2, array, value, instruction->GetValueCanBeNull());

      if (done.IsReferenced()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt64: {
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (Int32ConstantFrom(index) << TIMES_8) + data_offset;
        GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), array, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
        GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), temp, data_offset);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
        GetAssembler()->StoreSToOffset(SRegisterFrom(value), array, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
        GetAssembler()->StoreSToOffset(SRegisterFrom(value), temp, data_offset);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (Int32ConstantFrom(index) << TIMES_8) + data_offset;
        GetAssembler()->StoreDToOffset(DRegisterFrom(value), array, offset);
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
        GetAssembler()->StoreDToOffset(DRegisterFrom(value), temp, data_offset);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Objects are handled in the switch.
  if (value_type != DataType::Type::kReference) {
    // TODO(VIXL): Ensure we record the pc position immediately after the preceding store
    // instruction.
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}

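// HArrayLength loads the 32-bit length field. For String.length() with string compression
// enabled, the count word also carries the compression flag in its lowest bit, which is
// shifted out below.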
void LocationsBuilderARMVIXL::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  vixl32::Register obj = InputRegisterAt(instruction, 0);
  vixl32::Register out = OutputRegister(instruction);
  {
    ExactAssemblyScope aas(GetVIXLAssembler(),
                           vixl32::kMaxInstructionSizeInBytes,
                           CodeBufferCheckScope::kMaximumSize);
    __ ldr(out, MemOperand(obj, offset));
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Lsr(out, out, 1u);
  }
}

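// HIntermediateAddress materializes `array + data_offset` once so that several accesses can
// share it; it lowers to a single ADD with a register or immediate offset.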
void LocationsBuilderARMVIXL::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->GetOffset()));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  vixl32::Register out = OutputRegister(instruction);
  vixl32::Register first = InputRegisterAt(instruction, 0);
  Location second = instruction->GetLocations()->InAt(1);

  if (second.IsRegister()) {
    __ Add(out, first, RegisterFrom(second));
  } else {
    __ Add(out, first, Int32ConstantFrom(second));
  }
}

void LocationsBuilderARMVIXL::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instruction) {
  LOG(FATAL) << "Unreachable " << instruction->GetId();
}

void InstructionCodeGeneratorARMVIXL::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instruction) {
  LOG(FATAL) << "Unreachable " << instruction->GetId();
}

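// HBoundsCheck compares the index against the array length and branches to a throwing slow
// path on failure; when both operands are constants the check is resolved statically.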
void LocationsBuilderARMVIXL::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
  caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(1)));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);

  HInstruction* index = instruction->InputAt(0);
  HInstruction* length = instruction->InputAt(1);
  // If both index and length are constants we can statically check the bounds. But if at least one
  // of them is not encodable ArmEncodableConstantOrRegister will create
  // Location::RequiresRegister() which is not desired to happen. Instead we create constant
  // locations.
  bool both_const = index->IsConstant() && length->IsConstant();
  locations->SetInAt(0, both_const
      ? Location::ConstantLocation(index->AsConstant())
      : ArmEncodableConstantOrRegister(index, CMP));
  locations->SetInAt(1, both_const
      ? Location::ConstantLocation(length->AsConstant())
      : ArmEncodableConstantOrRegister(length, CMP));
}

void InstructionCodeGeneratorARMVIXL::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);

  if (length_loc.IsConstant()) {
    int32_t length = Int32ConstantFrom(length_loc);
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = Int32ConstantFrom(index_loc);
      if (index < 0 || index >= length) {
        SlowPathCodeARMVIXL* slow_path =
            new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARMVIXL(instruction);
        codegen_->AddSlowPath(slow_path);
        __ B(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    SlowPathCodeARMVIXL* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARMVIXL(instruction);
    __ Cmp(RegisterFrom(index_loc), length);
    codegen_->AddSlowPath(slow_path);
    __ B(hs, slow_path->GetEntryLabel());
  } else {
    SlowPathCodeARMVIXL* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARMVIXL(instruction);
    __ Cmp(RegisterFrom(length_loc), InputOperandAt(instruction, 0));
    codegen_->AddSlowPath(slow_path);
    __ B(ls, slow_path->GetEntryLabel());
  }
}

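// Marks the card table entry covering `object` after a reference store so the GC will
// re-scan it. `temp` and `card` are clobbered; the marking is skipped when `value` is null
// and `can_be_null` is true.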
void CodeGeneratorARMVIXL::MarkGCCard(vixl32::Register temp,
                                      vixl32::Register card,
                                      vixl32::Register object,
                                      vixl32::Register value,
                                      bool can_be_null) {
  vixl32::Label is_null;
  if (can_be_null) {
    __ CompareAndBranchIfZero(value, &is_null);
  }
  // Load the address of the card table into `card`.
  GetAssembler()->LoadFromOffset(
      kLoadWord, card, tr, Thread::CardTableOffset<kArmPointerSize>().Int32Value());
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ Lsr(temp, object, Operand::From(gc::accounting::CardTable::kCardShift));
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the STRB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
  __ Strb(card, MemOperand(card, temp));
  if (can_be_null) {
    __ Bind(&is_null);
  }
}

void LocationsBuilderARMVIXL::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARMVIXL::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

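// HSuspendCheck polls the thread's flags and enters the runtime through a slow path when a
// suspension is requested. Checks belonging to loop back edges or to an entry-block goto are
// emitted by the corresponding branch instead of here.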
void LocationsBuilderARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}

void InstructionCodeGeneratorARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 13);
}

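// Emits the suspend check itself: load the 16-bit thread flags and test them. Without a
// `successor` the fast path falls through via the slow path's return label; with one, the
// fast path branches to the successor and the slow path is entered otherwise.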
void InstructionCodeGeneratorARMVIXL::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                           HBasicBlock* successor) {
  SuspendCheckSlowPathARMVIXL* slow_path =
      down_cast<SuspendCheckSlowPathARMVIXL*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARMVIXL(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  GetAssembler()->LoadFromOffset(
      kLoadUnsignedHalfword, temp, tr, Thread::ThreadFlagsOffset<kArmPointerSize>().Int32Value());
  if (successor == nullptr) {
    __ CompareAndBranchIfNonZero(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ CompareAndBranchIfZero(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
  }
}

ArmVIXLAssembler* ParallelMoveResolverARMVIXL::GetAssembler() const {
  return codegen_->GetAssembler();
}

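// Parallel move support: emits a single pending move, covering every combination of core
// register, FP register, register pair, stack slot and constant the register allocator can
// produce, using scratch registers for memory-to-memory transfers.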
void ParallelMoveResolverARMVIXL::EmitMove(size_t index) {
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(RegisterFrom(destination), RegisterFrom(source));
    } else if (destination.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
    } else {
      DCHECK(destination.IsStackSlot());
      GetAssembler()->StoreToOffset(kStoreWord,
                                    RegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      GetAssembler()->LoadFromOffset(kLoadWord,
                                     RegisterFrom(destination),
                                     sp,
                                     source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      vixl32::Register temp = temps.Acquire();
      GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
      GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsRegister()) {
      __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
    } else if (destination.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
    } else {
      DCHECK(destination.IsStackSlot());
      GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsDoubleStackSlot()) {
      vixl32::DRegister temp = temps.AcquireD();
      GetAssembler()->LoadDFromOffset(temp, sp, source.GetStackIndex());
      GetAssembler()->StoreDToOffset(temp, sp, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      DCHECK(ExpectedPairLayout(destination));
      GetAssembler()->LoadFromOffset(
          kLoadWordPair, LowRegisterFrom(destination), sp, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      GetAssembler()->LoadDFromOffset(DRegisterFrom(destination), sp, source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    if (destination.IsRegisterPair()) {
      __ Mov(LowRegisterFrom(destination), LowRegisterFrom(source));
      __ Mov(HighRegisterFrom(destination), HighRegisterFrom(source));
    } else if (destination.IsFpuRegisterPair()) {
      __ Vmov(DRegisterFrom(destination), LowRegisterFrom(source), HighRegisterFrom(source));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      GetAssembler()->StoreToOffset(kStoreWordPair,
                                    LowRegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    if (destination.IsRegisterPair()) {
      __ Vmov(LowRegisterFrom(destination), HighRegisterFrom(destination), DRegisterFrom(source));
    } else if (destination.IsFpuRegisterPair()) {
      __ Vmov(DRegisterFrom(destination), DRegisterFrom(source));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      GetAssembler()->StoreDToOffset(DRegisterFrom(source), sp, destination.GetStackIndex());
    }
  } else {
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ Mov(RegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsStackSlot());
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, value);
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = Int64ConstantFrom(source);
      if (destination.IsRegisterPair()) {
        __ Mov(LowRegisterFrom(destination), Low32Bits(value));
        __ Mov(HighRegisterFrom(destination), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, Low32Bits(value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
        __ Mov(temp, High32Bits(value));
        GetAssembler()->StoreToOffset(kStoreWord,
                                      temp,
                                      sp,
                                      destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ Vmov(DRegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, Low32Bits(int_value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
        __ Mov(temp, High32Bits(int_value));
        GetAssembler()->StoreToOffset(kStoreWord,
                                      temp,
                                      sp,
                                      destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ Vmov(SRegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsStackSlot());
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, bit_cast<int32_t, float>(value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
      }
    }
  }
}

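// Helpers for EmitSwap below: exchange a core register with a stack slot, and exchange two
// stack slots (possibly spilling a core register to obtain a second scratch).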
void ParallelMoveResolverARMVIXL::Exchange(vixl32::Register reg, int mem) {
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  __ Mov(temp, reg);
  GetAssembler()->LoadFromOffset(kLoadWord, reg, sp, mem);
  GetAssembler()->StoreToOffset(kStoreWord, temp, sp, mem);
}

void ParallelMoveResolverARMVIXL::Exchange(int mem1, int mem2) {
  // TODO(VIXL32): Double check the performance of this implementation.
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  vixl32::Register temp1 = temps.Acquire();
  ScratchRegisterScope ensure_scratch(
      this, temp1.GetCode(), r0.GetCode(), codegen_->GetNumberOfCoreRegisters());
  vixl32::Register temp2(ensure_scratch.GetRegister());

  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  GetAssembler()->LoadFromOffset(kLoadWord, temp1, sp, mem1 + stack_offset);
  GetAssembler()->LoadFromOffset(kLoadWord, temp2, sp, mem2 + stack_offset);
  GetAssembler()->StoreToOffset(kStoreWord, temp1, sp, mem2 + stack_offset);
  GetAssembler()->StoreToOffset(kStoreWord, temp2, sp, mem1 + stack_offset);
}

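// Swaps the contents of two locations in place, cycling the values through scratch core or
// D registers depending on the operand kinds.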
Alexandre Rames9c19bd62016-10-24 11:50:32 +01006840void ParallelMoveResolverARMVIXL::EmitSwap(size_t index) {
6841 MoveOperands* move = moves_[index];
6842 Location source = move->GetSource();
6843 Location destination = move->GetDestination();
6844 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
6845
6846 if (source.IsRegister() && destination.IsRegister()) {
6847 vixl32::Register temp = temps.Acquire();
6848 DCHECK(!RegisterFrom(source).Is(temp));
6849 DCHECK(!RegisterFrom(destination).Is(temp));
6850 __ Mov(temp, RegisterFrom(destination));
6851 __ Mov(RegisterFrom(destination), RegisterFrom(source));
6852 __ Mov(RegisterFrom(source), temp);
6853 } else if (source.IsRegister() && destination.IsStackSlot()) {
6854 Exchange(RegisterFrom(source), destination.GetStackIndex());
6855 } else if (source.IsStackSlot() && destination.IsRegister()) {
6856 Exchange(RegisterFrom(destination), source.GetStackIndex());
6857 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Anton Kirilovdda43962016-11-21 19:55:20 +00006858 Exchange(source.GetStackIndex(), destination.GetStackIndex());
Alexandre Rames9c19bd62016-10-24 11:50:32 +01006859 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Nicolas Geoffray13a797b2017-03-15 16:41:31 +00006860 vixl32::Register temp = temps.Acquire();
Anton Kirilovdda43962016-11-21 19:55:20 +00006861 __ Vmov(temp, SRegisterFrom(source));
6862 __ Vmov(SRegisterFrom(source), SRegisterFrom(destination));
6863 __ Vmov(SRegisterFrom(destination), temp);
Alexandre Rames9c19bd62016-10-24 11:50:32 +01006864 } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
6865 vixl32::DRegister temp = temps.AcquireD();
6866 __ Vmov(temp, LowRegisterFrom(source), HighRegisterFrom(source));
6867 __ Mov(LowRegisterFrom(source), LowRegisterFrom(destination));
6868 __ Mov(HighRegisterFrom(source), HighRegisterFrom(destination));
6869 __ Vmov(LowRegisterFrom(destination), HighRegisterFrom(destination), temp);
6870 } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
6871 vixl32::Register low_reg = LowRegisterFrom(source.IsRegisterPair() ? source : destination);
6872 int mem = source.IsRegisterPair() ? destination.GetStackIndex() : source.GetStackIndex();
6873 DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
6874 vixl32::DRegister temp = temps.AcquireD();
6875 __ Vmov(temp, low_reg, vixl32::Register(low_reg.GetCode() + 1));
6876 GetAssembler()->LoadFromOffset(kLoadWordPair, low_reg, sp, mem);
6877 GetAssembler()->StoreDToOffset(temp, sp, mem);
6878 } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01006879 vixl32::DRegister first = DRegisterFrom(source);
6880 vixl32::DRegister second = DRegisterFrom(destination);
6881 vixl32::DRegister temp = temps.AcquireD();
6882 __ Vmov(temp, first);
6883 __ Vmov(first, second);
6884 __ Vmov(second, temp);
Alexandre Rames9c19bd62016-10-24 11:50:32 +01006885 } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
Anton Kirilovdda43962016-11-21 19:55:20 +00006886 vixl32::DRegister reg = source.IsFpuRegisterPair()
6887 ? DRegisterFrom(source)
6888 : DRegisterFrom(destination);
6889 int mem = source.IsFpuRegisterPair()
6890 ? destination.GetStackIndex()
6891 : source.GetStackIndex();
6892 vixl32::DRegister temp = temps.AcquireD();
6893 __ Vmov(temp, reg);
6894 GetAssembler()->LoadDFromOffset(reg, sp, mem);
6895 GetAssembler()->StoreDToOffset(temp, sp, mem);
Alexandre Rames9c19bd62016-10-24 11:50:32 +01006896 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
Anton Kirilovdda43962016-11-21 19:55:20 +00006897 vixl32::SRegister reg = source.IsFpuRegister()
6898 ? SRegisterFrom(source)
6899 : SRegisterFrom(destination);
6900 int mem = source.IsFpuRegister()
6901 ? destination.GetStackIndex()
6902 : source.GetStackIndex();
6903 vixl32::Register temp = temps.Acquire();
6904 __ Vmov(temp, reg);
6905 GetAssembler()->LoadSFromOffset(reg, sp, mem);
6906 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, mem);
Alexandre Rames9c19bd62016-10-24 11:50:32 +01006907 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
6908 vixl32::DRegister temp1 = temps.AcquireD();
6909 vixl32::DRegister temp2 = temps.AcquireD();
6910 __ Vldr(temp1, MemOperand(sp, source.GetStackIndex()));
6911 __ Vldr(temp2, MemOperand(sp, destination.GetStackIndex()));
6912 __ Vstr(temp1, MemOperand(sp, destination.GetStackIndex()));
6913 __ Vstr(temp2, MemOperand(sp, source.GetStackIndex()));
6914 } else {
6915 LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
6916 }
Scott Wakelingfe885462016-09-22 10:24:38 +01006917}
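
// For reference, the register-pair <-> double stack slot case in EmitSwap() above comes
// down to roughly the following (a sketch assuming the pair lives in r2/r3; the D register
// is whichever scratch register AcquireD() returns):
//
//   vmov d14, r2, r3         ; save the pair in the scratch D register
//   ldrd r2, r3, [sp, #mem]  ; kLoadWordPair: load the stack slot into the pair
//   vstr d14, [sp, #mem]     ; store the saved pair into the stack slot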
6918
Nicolas Geoffray13a797b2017-03-15 16:41:31 +00006919void ParallelMoveResolverARMVIXL::SpillScratch(int reg) {
6920 __ Push(vixl32::Register(reg));
Scott Wakelingfe885462016-09-22 10:24:38 +01006921}
6922
Nicolas Geoffray13a797b2017-03-15 16:41:31 +00006923void ParallelMoveResolverARMVIXL::RestoreScratch(int reg) {
6924 __ Pop(vixl32::Register(reg));
Scott Wakelingfe885462016-09-22 10:24:38 +01006925}
6926
Artem Serov02d37832016-10-25 15:25:33 +01006927HLoadClass::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadClassKind(
Artem Serovd4cc5b22016-11-04 11:19:09 +00006928 HLoadClass::LoadKind desired_class_load_kind) {
6929 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006930 case HLoadClass::LoadKind::kInvalid:
6931 LOG(FATAL) << "UNREACHABLE";
6932 UNREACHABLE();
Artem Serovd4cc5b22016-11-04 11:19:09 +00006933 case HLoadClass::LoadKind::kReferrersClass:
6934 break;
Artem Serovd4cc5b22016-11-04 11:19:09 +00006935 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006936 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006937 case HLoadClass::LoadKind::kBssEntry:
6938 DCHECK(!Runtime::Current()->UseJitCompilation());
6939 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006940 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006941 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006942 DCHECK(Runtime::Current()->UseJitCompilation());
Artem Serovc5fcb442016-12-02 19:19:58 +00006943 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006944 case HLoadClass::LoadKind::kRuntimeCall:
Artem Serovd4cc5b22016-11-04 11:19:09 +00006945 break;
6946 }
6947 return desired_class_load_kind;
Artem Serov02d37832016-10-25 15:25:33 +01006948}
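
// Summary of the constraints checked above: the PC-relative kinds (kBootImageLinkTimePcRelative,
// kBootImageRelRo, kBssEntry) are only valid for AOT compilation, kJitBootImageAddress and
// kJitTableAddress require the JIT, and kReferrersClass and kRuntimeCall are always supported.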
6949
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006950void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00006951 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006952 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006953 InvokeRuntimeCallingConventionARMVIXL calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00006954 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006955 cls,
6956 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00006957 LocationFrom(r0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00006958 DCHECK(calling_convention.GetRegisterAt(0).Is(r0));
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006959 return;
6960 }
Vladimir Marko41559982017-01-06 14:04:23 +00006961 DCHECK(!cls->NeedsAccessCheck());
Scott Wakelingfe885462016-09-22 10:24:38 +01006962
Artem Serovd4cc5b22016-11-04 11:19:09 +00006963 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
6964 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006965 ? LocationSummary::kCallOnSlowPath
6966 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006967 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Artem Serovd4cc5b22016-11-04 11:19:09 +00006968 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Anton Kirilovedb2ac32016-11-30 15:14:10 +00006969 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Artem Serovd4cc5b22016-11-04 11:19:09 +00006970 }
6971
Vladimir Marko41559982017-01-06 14:04:23 +00006972 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006973 locations->SetInAt(0, Location::RequiresRegister());
6974 }
6975 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006976 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
6977 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6978 // Rely on the type resolution or initialization and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006979 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006980 } else {
6981 // For non-Baker read barrier we have a temp-clobbering call.
6982 }
6983 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006984}
6985
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006986// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6987// move.
6988void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00006989 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006990 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00006991 codegen_->GenerateLoadClassRuntimeCall(cls);
Andreas Gampe3db70682018-12-26 15:12:03 -08006992 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 14);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006993 return;
6994 }
Vladimir Marko41559982017-01-06 14:04:23 +00006995 DCHECK(!cls->NeedsAccessCheck());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006996
Vladimir Marko41559982017-01-06 14:04:23 +00006997 LocationSummary* locations = cls->GetLocations();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006998 Location out_loc = locations->Out();
6999 vixl32::Register out = OutputRegister(cls);
7000
Artem Serovd4cc5b22016-11-04 11:19:09 +00007001 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
7002 ? kWithoutReadBarrier
7003 : kCompilerReadBarrierOption;
Scott Wakelinga7812ae2016-10-17 10:03:36 +01007004 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00007005 switch (load_kind) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01007006 case HLoadClass::LoadKind::kReferrersClass: {
7007 DCHECK(!cls->CanCallRuntime());
7008 DCHECK(!cls->MustGenerateClinitCheck());
7009 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
7010 vixl32::Register current_method = InputRegisterAt(cls, 0);
Vladimir Markoca1e0382018-04-11 09:58:41 +00007011 codegen_->GenerateGcRootFieldLoad(cls,
7012 out_loc,
7013 current_method,
7014 ArtMethod::DeclaringClassOffset().Int32Value(),
7015 read_barrier_option);
Artem Serovd4cc5b22016-11-04 11:19:09 +00007016 break;
7017 }
Artem Serovd4cc5b22016-11-04 11:19:09 +00007018 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007019 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007020 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
7021 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007022 codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007023 codegen_->EmitMovwMovtPlaceholder(labels, out);
7024 break;
7025 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00007026 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01007027 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
7028 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00007029 codegen_->NewBootImageRelRoPatch(codegen_->GetBootImageOffset(cls));
Vladimir Marko94ec2db2017-09-06 17:21:03 +01007030 codegen_->EmitMovwMovtPlaceholder(labels, out);
Andreas Gampe3db70682018-12-26 15:12:03 -08007031 __ Ldr(out, MemOperand(out, /* offset= */ 0));
Vladimir Marko94ec2db2017-09-06 17:21:03 +01007032 break;
7033 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007034 case HLoadClass::LoadKind::kBssEntry: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007035 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
Vladimir Marko1998cd02017-01-13 13:02:58 +00007036 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
Vladimir Markof3c52b42017-11-17 17:32:12 +00007037 codegen_->EmitMovwMovtPlaceholder(labels, out);
Andreas Gampe3db70682018-12-26 15:12:03 -08007038 codegen_->GenerateGcRootFieldLoad(cls, out_loc, out, /* offset= */ 0, read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007039 generate_null_check = true;
7040 break;
7041 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01007042 case HLoadClass::LoadKind::kJitBootImageAddress: {
7043 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
7044 uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
7045 DCHECK_NE(address, 0u);
7046 __ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
7047 break;
7048 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007049 case HLoadClass::LoadKind::kJitTableAddress: {
Artem Serovc5fcb442016-12-02 19:19:58 +00007050 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
7051 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007052 cls->GetClass()));
Artem Serovc5fcb442016-12-02 19:19:58 +00007053 // /* GcRoot<mirror::Class> */ out = *out
Andreas Gampe3db70682018-12-26 15:12:03 -08007054 codegen_->GenerateGcRootFieldLoad(cls, out_loc, out, /* offset= */ 0, read_barrier_option);
Artem Serovd4cc5b22016-11-04 11:19:09 +00007055 break;
7056 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007057 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007058 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00007059 LOG(FATAL) << "UNREACHABLE";
7060 UNREACHABLE();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01007061 }
7062
7063 if (generate_null_check || cls->MustGenerateClinitCheck()) {
7064 DCHECK(cls->CanCallRuntime());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007065 LoadClassSlowPathARMVIXL* slow_path =
Vladimir Markoa9f303c2018-07-20 16:43:56 +01007066 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARMVIXL(cls, cls);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01007067 codegen_->AddSlowPath(slow_path);
7068 if (generate_null_check) {
xueliang.zhongf51bc622016-11-04 09:23:32 +00007069 __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01007070 }
7071 if (cls->MustGenerateClinitCheck()) {
7072 GenerateClassInitializationCheck(slow_path, out);
7073 } else {
7074 __ Bind(slow_path->GetExitLabel());
7075 }
Andreas Gampe3db70682018-12-26 15:12:03 -08007076 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 15);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01007077 }
7078}
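
// A rough sketch of what the kBssEntry case above expands to (the movw/movt immediates are
// placeholders patched at link time; the exact null-check instruction depends on the register):
//
//   movw out, #<bss entry offset, low half>
//   movt out, #<bss entry offset, high half>
//   add  out, pc                 ; out now points at the type's .bss slot
//   ldr  out, [out]              ; GcRoot<mirror::Class>, possibly via a read barrier
//   cbz  out, <resolution slow path>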
7079
Orion Hodsondbaa5c72018-05-10 08:22:46 +01007080void LocationsBuilderARMVIXL::VisitLoadMethodHandle(HLoadMethodHandle* load) {
7081 InvokeRuntimeCallingConventionARMVIXL calling_convention;
7082 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
7083 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
7084}
7085
7086void InstructionCodeGeneratorARMVIXL::VisitLoadMethodHandle(HLoadMethodHandle* load) {
7087 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
7088}
7089
Orion Hodson18259d72018-04-12 11:18:23 +01007090void LocationsBuilderARMVIXL::VisitLoadMethodType(HLoadMethodType* load) {
7091 InvokeRuntimeCallingConventionARMVIXL calling_convention;
7092 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
7093 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
7094}
7095
7096void InstructionCodeGeneratorARMVIXL::VisitLoadMethodType(HLoadMethodType* load) {
7097 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
7098}
7099
Artem Serov02d37832016-10-25 15:25:33 +01007100void LocationsBuilderARMVIXL::VisitClinitCheck(HClinitCheck* check) {
7101 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007102 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Artem Serov02d37832016-10-25 15:25:33 +01007103 locations->SetInAt(0, Location::RequiresRegister());
7104 if (check->HasUses()) {
7105 locations->SetOut(Location::SameAsFirstInput());
7106 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01007107 // Rely on the type initialization to save everything we need.
7108 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Artem Serov02d37832016-10-25 15:25:33 +01007109}
7110
7111void InstructionCodeGeneratorARMVIXL::VisitClinitCheck(HClinitCheck* check) {
7112 // We assume the class is not null.
7113 LoadClassSlowPathARMVIXL* slow_path =
Vladimir Markoa9f303c2018-07-20 16:43:56 +01007114 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARMVIXL(check->GetLoadClass(), check);
Artem Serov02d37832016-10-25 15:25:33 +01007115 codegen_->AddSlowPath(slow_path);
7116 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
7117}
7118
7119void InstructionCodeGeneratorARMVIXL::GenerateClassInitializationCheck(
7120 LoadClassSlowPathARMVIXL* slow_path, vixl32::Register class_reg) {
7121 UseScratchRegisterScope temps(GetVIXLAssembler());
7122 vixl32::Register temp = temps.Acquire();
Vladimir Markodc682aa2018-01-04 18:42:57 +00007123 constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
7124 const size_t status_byte_offset =
7125 mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
7126 constexpr uint32_t shifted_initialized_value =
7127 enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
7128
7129 GetAssembler()->LoadFromOffset(kLoadUnsignedByte, temp, class_reg, status_byte_offset);
7130 __ Cmp(temp, shifted_initialized_value);
Vladimir Marko2c64a832018-01-04 11:31:56 +00007131 __ B(lo, slow_path->GetEntryLabel());
Artem Serov02d37832016-10-25 15:25:33 +01007132 // Even if the initialized flag is set, we may be in a situation where caches are not synced
7133 // properly. Therefore, we do a memory fence.
7134 __ Dmb(ISH);
7135 __ Bind(slow_path->GetExitLabel());
7136}
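
// The initialization check above boils down to (a sketch, not exact encodings):
//
//   ldrb temp, [class_reg, #<status byte offset>]
//   cmp  temp, #(ClassStatus::kInitialized << (status_lsb_position % kBitsPerByte))
//   blo  <clinit slow path>      ; not yet initialized -> call the runtime
//   dmb  ish                     ; make fields written by the initializer visible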
7137
Vladimir Marko175e7862018-03-27 09:03:13 +00007138void InstructionCodeGeneratorARMVIXL::GenerateBitstringTypeCheckCompare(
7139 HTypeCheckInstruction* check,
7140 vixl32::Register temp,
7141 vixl32::FlagsUpdate flags_update) {
7142 uint32_t path_to_root = check->GetBitstringPathToRoot();
7143 uint32_t mask = check->GetBitstringMask();
7144 DCHECK(IsPowerOfTwo(mask + 1));
7145 size_t mask_bits = WhichPowerOf2(mask + 1);
7146
7147 // Note that HInstanceOf checks for a zero value in `temp`, whereas HCheckCast needs
7148 // the Z flag set for the following BNE; the `flags_update` parameter selects between the two.
7149 if (mask_bits == 16u) {
7150 // Load only the bitstring part of the status word.
7151 __ Ldrh(temp, MemOperand(temp, mirror::Class::StatusOffset().Int32Value()));
7152 // Check if the bitstring bits are equal to `path_to_root`.
7153 if (flags_update == SetFlags) {
7154 __ Cmp(temp, path_to_root);
7155 } else {
7156 __ Sub(temp, temp, path_to_root);
7157 }
7158 } else {
7159 // /* uint32_t */ temp = temp->status_
7160 __ Ldr(temp, MemOperand(temp, mirror::Class::StatusOffset().Int32Value()));
7161 if (GetAssembler()->ShifterOperandCanHold(SUB, path_to_root)) {
7162 // Compare the bitstring bits using SUB.
7163 __ Sub(temp, temp, path_to_root);
7164 // Shift out bits that do not contribute to the comparison.
7165 __ Lsl(flags_update, temp, temp, dchecked_integral_cast<uint32_t>(32u - mask_bits));
7166 } else if (IsUint<16>(path_to_root)) {
7167 if (temp.IsLow()) {
7168 // Note: Optimized for size but contains one more dependent instruction than necessary.
7169 // MOVW+SUB(register) would be 8 bytes unless we find a low-reg temporary but the
7170 // macro assembler would use the high reg IP for the constant by default.
7171 // Compare the bitstring bits using SUB.
7172 __ Sub(temp, temp, path_to_root & 0x00ffu); // 16-bit SUB (immediate) T2
7173 __ Sub(temp, temp, path_to_root & 0xff00u); // 32-bit SUB (immediate) T3
7174 // Shift out bits that do not contribute to the comparison.
7175 __ Lsl(flags_update, temp, temp, dchecked_integral_cast<uint32_t>(32u - mask_bits));
7176 } else {
7177 // Extract the bitstring bits.
7178 __ Ubfx(temp, temp, 0, mask_bits);
7179 // Check if the bitstring bits are equal to `path_to_root`.
7180 if (flags_update == SetFlags) {
7181 __ Cmp(temp, path_to_root);
7182 } else {
7183 __ Sub(temp, temp, path_to_root);
7184 }
7185 }
7186 } else {
7187 // Shift out bits that do not contribute to the comparison.
7188 __ Lsl(temp, temp, dchecked_integral_cast<uint32_t>(32u - mask_bits));
7189 // Check if the shifted bitstring bits are equal to `path_to_root << (32u - mask_bits)`.
7190 if (flags_update == SetFlags) {
7191 __ Cmp(temp, path_to_root << (32u - mask_bits));
7192 } else {
7193 __ Sub(temp, temp, path_to_root << (32u - mask_bits));
7194 }
7195 }
7196 }
7197}
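
// Worked example with illustrative values (not taken from a real class hierarchy): for
// mask == 0xffff (mask_bits == 16) and path_to_root == 0x1234, the HInstanceOf variant
// emits LDRH of the low 16 bits of status_ followed by SUB temp, temp, #0x1234, so `temp`
// is zero iff the bitstring bits equal `path_to_root`; the HCheckCast variant uses CMP
// instead so that the caller's BNE can branch straight to the slow path.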
7198
Artem Serov02d37832016-10-25 15:25:33 +01007199HLoadString::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadStringKind(
Artem Serovd4cc5b22016-11-04 11:19:09 +00007200 HLoadString::LoadKind desired_string_load_kind) {
7201 switch (desired_string_load_kind) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007202 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00007203 case HLoadString::LoadKind::kBootImageRelRo:
Artem Serovd4cc5b22016-11-04 11:19:09 +00007204 case HLoadString::LoadKind::kBssEntry:
7205 DCHECK(!Runtime::Current()->UseJitCompilation());
7206 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01007207 case HLoadString::LoadKind::kJitBootImageAddress:
Artem Serovd4cc5b22016-11-04 11:19:09 +00007208 case HLoadString::LoadKind::kJitTableAddress:
7209 DCHECK(Runtime::Current()->UseJitCompilation());
Artem Serovc5fcb442016-12-02 19:19:58 +00007210 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007211 case HLoadString::LoadKind::kRuntimeCall:
Artem Serovd4cc5b22016-11-04 11:19:09 +00007212 break;
7213 }
7214 return desired_string_load_kind;
Artem Serov02d37832016-10-25 15:25:33 +01007215}
7216
7217void LocationsBuilderARMVIXL::VisitLoadString(HLoadString* load) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007218 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01007219 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Artem Serov02d37832016-10-25 15:25:33 +01007220 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007221 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Artem Serov02d37832016-10-25 15:25:33 +01007222 locations->SetOut(LocationFrom(r0));
7223 } else {
7224 locations->SetOut(Location::RequiresRegister());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007225 if (load_kind == HLoadString::LoadKind::kBssEntry) {
7226 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00007227 // Rely on the pResolveString and marking to save everything we need, including temps.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01007228 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007229 } else {
7230 // For non-Baker read barrier we have a temp-clobbering call.
7231 }
7232 }
Artem Serov02d37832016-10-25 15:25:33 +01007233 }
7234}
7235
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007236// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7237// move.
7238void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007239 LocationSummary* locations = load->GetLocations();
7240 Location out_loc = locations->Out();
7241 vixl32::Register out = OutputRegister(load);
7242 HLoadString::LoadKind load_kind = load->GetLoadKind();
7243
7244 switch (load_kind) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007245 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
7246 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
7247 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007248 codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex());
Artem Serovd4cc5b22016-11-04 11:19:09 +00007249 codegen_->EmitMovwMovtPlaceholder(labels, out);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007250 return;
Artem Serovd4cc5b22016-11-04 11:19:09 +00007251 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00007252 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007253 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
7254 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00007255 codegen_->NewBootImageRelRoPatch(codegen_->GetBootImageOffset(load));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007256 codegen_->EmitMovwMovtPlaceholder(labels, out);
Andreas Gampe3db70682018-12-26 15:12:03 -08007257 __ Ldr(out, MemOperand(out, /* offset= */ 0));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007258 return;
Artem Serovd4cc5b22016-11-04 11:19:09 +00007259 }
7260 case HLoadString::LoadKind::kBssEntry: {
Artem Serovd4cc5b22016-11-04 11:19:09 +00007261 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01007262 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
Vladimir Markof3c52b42017-11-17 17:32:12 +00007263 codegen_->EmitMovwMovtPlaceholder(labels, out);
Vladimir Markoca1e0382018-04-11 09:58:41 +00007264 codegen_->GenerateGcRootFieldLoad(
Andreas Gampe3db70682018-12-26 15:12:03 -08007265 load, out_loc, out, /* offset= */ 0, kCompilerReadBarrierOption);
Artem Serovd4cc5b22016-11-04 11:19:09 +00007266 LoadStringSlowPathARMVIXL* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007267 new (codegen_->GetScopedAllocator()) LoadStringSlowPathARMVIXL(load);
Artem Serovd4cc5b22016-11-04 11:19:09 +00007268 codegen_->AddSlowPath(slow_path);
7269 __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
7270 __ Bind(slow_path->GetExitLabel());
Andreas Gampe3db70682018-12-26 15:12:03 -08007271 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 16);
Artem Serovd4cc5b22016-11-04 11:19:09 +00007272 return;
7273 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01007274 case HLoadString::LoadKind::kJitBootImageAddress: {
7275 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
7276 DCHECK_NE(address, 0u);
7277 __ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
7278 return;
7279 }
Artem Serovd4cc5b22016-11-04 11:19:09 +00007280 case HLoadString::LoadKind::kJitTableAddress: {
Artem Serovc5fcb442016-12-02 19:19:58 +00007281 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007282 load->GetStringIndex(),
7283 load->GetString()));
Artem Serovc5fcb442016-12-02 19:19:58 +00007284 // /* GcRoot<mirror::String> */ out = *out
Vladimir Markoca1e0382018-04-11 09:58:41 +00007285 codegen_->GenerateGcRootFieldLoad(
Andreas Gampe3db70682018-12-26 15:12:03 -08007286 load, out_loc, out, /* offset= */ 0, kCompilerReadBarrierOption);
Artem Serovc5fcb442016-12-02 19:19:58 +00007287 return;
Artem Serovd4cc5b22016-11-04 11:19:09 +00007288 }
7289 default:
7290 break;
7291 }
Artem Serov02d37832016-10-25 15:25:33 +01007292
7293 // TODO: Re-add the compiler code to do the string dex cache lookup.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007294 DCHECK_EQ(load->GetLoadKind(), HLoadString::LoadKind::kRuntimeCall);
Artem Serov02d37832016-10-25 15:25:33 +01007295 InvokeRuntimeCallingConventionARMVIXL calling_convention;
Andreas Gampe8a0128a2016-11-28 07:38:35 -08007296 __ Mov(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
Artem Serov02d37832016-10-25 15:25:33 +01007297 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
7298 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Andreas Gampe3db70682018-12-26 15:12:03 -08007299 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 17);
Artem Serov02d37832016-10-25 15:25:33 +01007300}
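
// The kRuntimeCall fallback above amounts to roughly the following (a sketch; the call is
// emitted by InvokeRuntime through the thread-local entrypoint table):
//
//   mov r0, #<string index>
//   ldr lr, [tr, #<pResolveString entrypoint offset>]
//   blx lr                       ; returns the resolved String in r0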
7301
7302static int32_t GetExceptionTlsOffset() {
7303 return Thread::ExceptionOffset<kArmPointerSize>().Int32Value();
7304}
7305
7306void LocationsBuilderARMVIXL::VisitLoadException(HLoadException* load) {
7307 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007308 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01007309 locations->SetOut(Location::RequiresRegister());
7310}
7311
7312void InstructionCodeGeneratorARMVIXL::VisitLoadException(HLoadException* load) {
7313 vixl32::Register out = OutputRegister(load);
7314 GetAssembler()->LoadFromOffset(kLoadWord, out, tr, GetExceptionTlsOffset());
7315}
7316
7317
7318void LocationsBuilderARMVIXL::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007319 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
Artem Serov02d37832016-10-25 15:25:33 +01007320}
7321
7322void InstructionCodeGeneratorARMVIXL::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
7323 UseScratchRegisterScope temps(GetVIXLAssembler());
7324 vixl32::Register temp = temps.Acquire();
7325 __ Mov(temp, 0);
7326 GetAssembler()->StoreToOffset(kStoreWord, temp, tr, GetExceptionTlsOffset());
7327}
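
// Together, the two visitors above each reduce to a single TLS access, roughly:
//
//   ldr out, [tr, #<Thread exception offset>]    ; VisitLoadException
//   mov temp, #0
//   str temp, [tr, #<Thread exception offset>]   ; VisitClearException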
7328
7329void LocationsBuilderARMVIXL::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007330 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
7331 instruction, LocationSummary::kCallOnMainOnly);
Artem Serov02d37832016-10-25 15:25:33 +01007332 InvokeRuntimeCallingConventionARMVIXL calling_convention;
7333 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
7334}
7335
7336void InstructionCodeGeneratorARMVIXL::VisitThrow(HThrow* instruction) {
7337 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
7338 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
7339}
7340
Artem Serov657022c2016-11-23 14:19:38 +00007341// Temp is used for read barrier.
7342static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
7343 if (kEmitCompilerReadBarrier &&
7344 (kUseBakerReadBarrier ||
7345 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
7346 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
7347 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
7348 return 1;
7349 }
7350 return 0;
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007351}
7352
Artem Serov657022c2016-11-23 14:19:38 +00007353 // The interface check case uses 3 temps: one to hold the number of interfaces, one for the
7354 // iftable pointer (advanced as we iterate), and one for loading the current interface.
7355// The other checks have one temp for loading the object's class.
7356static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
7357 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
7358 return 3;
7359 }
7360 return 1 + NumberOfInstanceOfTemps(type_check_kind);
7361}
Artem Serovcfbe9132016-10-14 15:58:56 +01007362
7363void LocationsBuilderARMVIXL::VisitInstanceOf(HInstanceOf* instruction) {
7364 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
7365 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
7366 bool baker_read_barrier_slow_path = false;
7367 switch (type_check_kind) {
7368 case TypeCheckKind::kExactCheck:
7369 case TypeCheckKind::kAbstractClassCheck:
7370 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00007371 case TypeCheckKind::kArrayObjectCheck: {
7372 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
7373 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
7374 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Artem Serovcfbe9132016-10-14 15:58:56 +01007375 break;
Vladimir Marko87584542017-12-12 17:47:52 +00007376 }
Artem Serovcfbe9132016-10-14 15:58:56 +01007377 case TypeCheckKind::kArrayCheck:
7378 case TypeCheckKind::kUnresolvedCheck:
7379 case TypeCheckKind::kInterfaceCheck:
7380 call_kind = LocationSummary::kCallOnSlowPath;
7381 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00007382 case TypeCheckKind::kBitstringCheck:
7383 break;
Artem Serovcfbe9132016-10-14 15:58:56 +01007384 }
7385
Vladimir Markoca6fff82017-10-03 14:49:14 +01007386 LocationSummary* locations =
7387 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Artem Serovcfbe9132016-10-14 15:58:56 +01007388 if (baker_read_barrier_slow_path) {
7389 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
7390 }
7391 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00007392 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
7393 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
7394 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
7395 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
7396 } else {
7397 locations->SetInAt(1, Location::RequiresRegister());
7398 }
Artem Serovcfbe9132016-10-14 15:58:56 +01007399 // The "out" register is used as a temporary, so it overlaps with the inputs.
7400 // Note that TypeCheckSlowPathARMVIXL uses this register too.
7401 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Artem Serov657022c2016-11-23 14:19:38 +00007402 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Artem Serovcfbe9132016-10-14 15:58:56 +01007403}
7404
7405void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction) {
7406 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
7407 LocationSummary* locations = instruction->GetLocations();
7408 Location obj_loc = locations->InAt(0);
7409 vixl32::Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00007410 vixl32::Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
7411 ? vixl32::Register()
7412 : InputRegisterAt(instruction, 1);
Artem Serovcfbe9132016-10-14 15:58:56 +01007413 Location out_loc = locations->Out();
7414 vixl32::Register out = OutputRegister(instruction);
Artem Serov657022c2016-11-23 14:19:38 +00007415 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
7416 DCHECK_LE(num_temps, 1u);
7417 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Artem Serovcfbe9132016-10-14 15:58:56 +01007418 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
7419 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7420 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7421 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007422 vixl32::Label done;
7423 vixl32::Label* const final_label = codegen_->GetFinalLabel(instruction, &done);
Artem Serovcfbe9132016-10-14 15:58:56 +01007424 SlowPathCodeARMVIXL* slow_path = nullptr;
7425
7426 // Return 0 if `obj` is null.
7427 // Avoid the null check if we know `obj` is not null.
7428 if (instruction->MustDoNullCheck()) {
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007429 DCHECK(!out.Is(obj));
7430 __ Mov(out, 0);
Andreas Gampe3db70682018-12-26 15:12:03 -08007431 __ CompareAndBranchIfZero(obj, final_label, /* is_far_target= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007432 }
7433
Artem Serovcfbe9132016-10-14 15:58:56 +01007434 switch (type_check_kind) {
7435 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007436 ReadBarrierOption read_barrier_option =
7437 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier6beced42016-11-15 15:51:31 -08007438 // /* HeapReference<Class> */ out = obj->klass_
7439 GenerateReferenceLoadTwoRegisters(instruction,
7440 out_loc,
7441 obj_loc,
7442 class_offset,
Artem Serov657022c2016-11-23 14:19:38 +00007443 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007444 read_barrier_option);
Artem Serovcfbe9132016-10-14 15:58:56 +01007445 // Classes must be equal for the instanceof to succeed.
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007446 __ Cmp(out, cls);
7447 // We speculatively set the result to false without changing the condition
7448 // flags, which allows us to avoid some branching later.
7449 __ Mov(LeaveFlags, out, 0);
7450
7451 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
7452 // we check that the output is in a low register, so that a 16-bit MOV
7453 // encoding can be used.
7454 if (out.IsLow()) {
7455 // We use the scope because of the IT block that follows.
7456 ExactAssemblyScope guard(GetVIXLAssembler(),
7457 2 * vixl32::k16BitT32InstructionSizeInBytes,
7458 CodeBufferCheckScope::kExactSize);
7459
7460 __ it(eq);
7461 __ mov(eq, out, 1);
7462 } else {
Andreas Gampe3db70682018-12-26 15:12:03 -08007463 __ B(ne, final_label, /* is_far_target= */ false);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007464 __ Mov(out, 1);
7465 }
7466
Artem Serovcfbe9132016-10-14 15:58:56 +01007467 break;
7468 }
7469
7470 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007471 ReadBarrierOption read_barrier_option =
7472 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier6beced42016-11-15 15:51:31 -08007473 // /* HeapReference<Class> */ out = obj->klass_
7474 GenerateReferenceLoadTwoRegisters(instruction,
7475 out_loc,
7476 obj_loc,
7477 class_offset,
Artem Serov657022c2016-11-23 14:19:38 +00007478 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007479 read_barrier_option);
Artem Serovcfbe9132016-10-14 15:58:56 +01007480 // If the class is abstract, we eagerly fetch the super class of the
7481 // object to avoid doing a comparison we know will fail.
7482 vixl32::Label loop;
7483 __ Bind(&loop);
7484 // /* HeapReference<Class> */ out = out->super_class_
Artem Serov657022c2016-11-23 14:19:38 +00007485 GenerateReferenceLoadOneRegister(instruction,
7486 out_loc,
7487 super_offset,
7488 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007489 read_barrier_option);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007490 // If `out` is null, we use it for the result, and jump to the final label.
Andreas Gampe3db70682018-12-26 15:12:03 -08007491 __ CompareAndBranchIfZero(out, final_label, /* is_far_target= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007492 __ Cmp(out, cls);
Andreas Gampe3db70682018-12-26 15:12:03 -08007493 __ B(ne, &loop, /* is_far_target= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007494 __ Mov(out, 1);
Artem Serovcfbe9132016-10-14 15:58:56 +01007495 break;
7496 }
7497
7498 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007499 ReadBarrierOption read_barrier_option =
7500 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier6beced42016-11-15 15:51:31 -08007501 // /* HeapReference<Class> */ out = obj->klass_
7502 GenerateReferenceLoadTwoRegisters(instruction,
7503 out_loc,
7504 obj_loc,
7505 class_offset,
Artem Serov657022c2016-11-23 14:19:38 +00007506 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007507 read_barrier_option);
Artem Serovcfbe9132016-10-14 15:58:56 +01007508 // Walk over the class hierarchy to find a match.
7509 vixl32::Label loop, success;
7510 __ Bind(&loop);
7511 __ Cmp(out, cls);
Andreas Gampe3db70682018-12-26 15:12:03 -08007512 __ B(eq, &success, /* is_far_target= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007513 // /* HeapReference<Class> */ out = out->super_class_
Artem Serov657022c2016-11-23 14:19:38 +00007514 GenerateReferenceLoadOneRegister(instruction,
7515 out_loc,
7516 super_offset,
7517 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007518 read_barrier_option);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007519 // This is essentially a null check, but it sets the condition flags to the
7520 // proper value for the code that follows the loop, i.e. not `eq`.
7521 __ Cmp(out, 1);
Andreas Gampe3db70682018-12-26 15:12:03 -08007522 __ B(hs, &loop, /* is_far_target= */ false);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007523
7524 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
7525 // we check that the output is in a low register, so that a 16-bit MOV
7526 // encoding can be used.
7527 if (out.IsLow()) {
7528 // If `out` is null, we use it for the result, and the condition flags
7529 // have already been set to `ne`, so the IT block that comes afterwards
7530 // (and which handles the successful case) turns into a NOP (instead of
7531 // overwriting `out`).
7532 __ Bind(&success);
7533
7534 // We use the scope because of the IT block that follows.
7535 ExactAssemblyScope guard(GetVIXLAssembler(),
7536 2 * vixl32::k16BitT32InstructionSizeInBytes,
7537 CodeBufferCheckScope::kExactSize);
7538
7539 // There is only one branch to the `success` label (which is bound to this
7540 // IT block), and it has the same condition, `eq`, so in that case the MOV
7541 // is executed.
7542 __ it(eq);
7543 __ mov(eq, out, 1);
7544 } else {
7545 // If `out` is null, we use it for the result, and jump to the final label.
Anton Kirilov6f644202017-02-27 18:29:45 +00007546 __ B(final_label);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007547 __ Bind(&success);
7548 __ Mov(out, 1);
Artem Serovcfbe9132016-10-14 15:58:56 +01007549 }
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007550
Artem Serovcfbe9132016-10-14 15:58:56 +01007551 break;
7552 }
7553
7554 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007555 ReadBarrierOption read_barrier_option =
7556 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier6beced42016-11-15 15:51:31 -08007557 // /* HeapReference<Class> */ out = obj->klass_
7558 GenerateReferenceLoadTwoRegisters(instruction,
7559 out_loc,
7560 obj_loc,
7561 class_offset,
Artem Serov657022c2016-11-23 14:19:38 +00007562 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007563 read_barrier_option);
Artem Serovcfbe9132016-10-14 15:58:56 +01007564 // Do an exact check.
7565 vixl32::Label exact_check;
7566 __ Cmp(out, cls);
Andreas Gampe3db70682018-12-26 15:12:03 -08007567 __ B(eq, &exact_check, /* is_far_target= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007568 // Otherwise, we need to check that the object's class is a non-primitive array.
7569 // /* HeapReference<Class> */ out = out->component_type_
Artem Serov657022c2016-11-23 14:19:38 +00007570 GenerateReferenceLoadOneRegister(instruction,
7571 out_loc,
7572 component_offset,
7573 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007574 read_barrier_option);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007575 // If `out` is null, we use it for the result, and jump to the final label.
Andreas Gampe3db70682018-12-26 15:12:03 -08007576 __ CompareAndBranchIfZero(out, final_label, /* is_far_target= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007577 GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
7578 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007579 __ Cmp(out, 0);
7580 // We speculatively set the result to false without changing the condition
7581 // flags, which allows us to avoid some branching later.
7582 __ Mov(LeaveFlags, out, 0);
7583
7584 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
7585 // we check that the output is in a low register, so that a 16-bit MOV
7586 // encoding can be used.
7587 if (out.IsLow()) {
7588 __ Bind(&exact_check);
7589
7590 // We use the scope because of the IT block that follows.
7591 ExactAssemblyScope guard(GetVIXLAssembler(),
7592 2 * vixl32::k16BitT32InstructionSizeInBytes,
7593 CodeBufferCheckScope::kExactSize);
7594
7595 __ it(eq);
7596 __ mov(eq, out, 1);
7597 } else {
Andreas Gampe3db70682018-12-26 15:12:03 -08007598 __ B(ne, final_label, /* is_far_target= */ false);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007599 __ Bind(&exact_check);
7600 __ Mov(out, 1);
7601 }
7602
Artem Serovcfbe9132016-10-14 15:58:56 +01007603 break;
7604 }
7605
7606 case TypeCheckKind::kArrayCheck: {
Artem Serov657022c2016-11-23 14:19:38 +00007607 // No read barrier since the slow path will retry upon failure.
Mathieu Chartier6beced42016-11-15 15:51:31 -08007608 // /* HeapReference<Class> */ out = obj->klass_
7609 GenerateReferenceLoadTwoRegisters(instruction,
7610 out_loc,
7611 obj_loc,
7612 class_offset,
Artem Serov657022c2016-11-23 14:19:38 +00007613 maybe_temp_loc,
7614 kWithoutReadBarrier);
Artem Serovcfbe9132016-10-14 15:58:56 +01007615 __ Cmp(out, cls);
7616 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007617 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARMVIXL(
Andreas Gampe3db70682018-12-26 15:12:03 -08007618 instruction, /* is_fatal= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007619 codegen_->AddSlowPath(slow_path);
7620 __ B(ne, slow_path->GetEntryLabel());
7621 __ Mov(out, 1);
Artem Serovcfbe9132016-10-14 15:58:56 +01007622 break;
7623 }
7624
7625 case TypeCheckKind::kUnresolvedCheck:
7626 case TypeCheckKind::kInterfaceCheck: {
7627 // Note that we indeed only call on slow path, but we always go
7628 // into the slow path for the unresolved and interface check
7629 // cases.
7630 //
7631 // We cannot directly call the InstanceofNonTrivial runtime
7632 // entry point without resorting to a type checking slow path
7633 // here (i.e. by calling InvokeRuntime directly), as it would
7634 // require to assign fixed registers for the inputs of this
7635 // HInstanceOf instruction (following the runtime calling
7636 // convention), which might be cluttered by the potential first
7637 // read barrier emission at the beginning of this method.
7638 //
7639 // TODO: Introduce a new runtime entry point taking the object
7640 // to test (instead of its class) as argument, and let it deal
7641 // with the read barrier issues. This will let us refactor this
7642 // case of the `switch` code as it was previously (with a direct
7643 // call to the runtime not using a type checking slow path).
7644 // This should also be beneficial for the other cases above.
7645 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007646 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARMVIXL(
Andreas Gampe3db70682018-12-26 15:12:03 -08007647 instruction, /* is_fatal= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007648 codegen_->AddSlowPath(slow_path);
7649 __ B(slow_path->GetEntryLabel());
Artem Serovcfbe9132016-10-14 15:58:56 +01007650 break;
7651 }
Vladimir Marko175e7862018-03-27 09:03:13 +00007652
7653 case TypeCheckKind::kBitstringCheck: {
7654 // /* HeapReference<Class> */ temp = obj->klass_
7655 GenerateReferenceLoadTwoRegisters(instruction,
7656 out_loc,
7657 obj_loc,
7658 class_offset,
7659 maybe_temp_loc,
7660 kWithoutReadBarrier);
7661
7662 GenerateBitstringTypeCheckCompare(instruction, out, DontCare);
7663 // If `out` is a low reg and we would have another low reg temp, we could
7664 // optimize this as RSBS+ADC, see GenerateConditionWithZero().
7665 //
7666 // Also, in some cases when `out` is a low reg and we're loading a constant to IP
7667 // it would make sense to use CMP+MOV+IT+MOV instead of SUB+CLZ+LSR as the code size
7668 // would be the same and we would have fewer direct data dependencies.
7669 codegen_->GenerateConditionWithZero(kCondEQ, out, out); // CLZ+LSR
7670 break;
7671 }
Artem Serovcfbe9132016-10-14 15:58:56 +01007672 }
7673
Artem Serovcfbe9132016-10-14 15:58:56 +01007674 if (done.IsReferenced()) {
7675 __ Bind(&done);
7676 }
7677
7678 if (slow_path != nullptr) {
7679 __ Bind(slow_path->GetExitLabel());
7680 }
7681}
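
// For the common kExactCheck case above, when `out` is a low register the result is computed
// without branches; approximately (a sketch, read barrier and null check omitted):
//
//   ldr   out, [obj, #class_offset]
//   cmp   out, cls
//   mov   out, #0                ; LeaveFlags: does not clobber the comparison result
//   it    eq
//   moveq out, #1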
7682
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007683void LocationsBuilderARMVIXL::VisitCheckCast(HCheckCast* instruction) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007684 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00007685 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01007686 LocationSummary* locations =
7687 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007688 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00007689 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
7690 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
7691 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
7692 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
7693 } else {
7694 locations->SetInAt(1, Location::RequiresRegister());
7695 }
Artem Serov657022c2016-11-23 14:19:38 +00007696 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007697}
7698
7699void InstructionCodeGeneratorARMVIXL::VisitCheckCast(HCheckCast* instruction) {
7700 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
7701 LocationSummary* locations = instruction->GetLocations();
7702 Location obj_loc = locations->InAt(0);
7703 vixl32::Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00007704 vixl32::Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
7705 ? vixl32::Register()
7706 : InputRegisterAt(instruction, 1);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007707 Location temp_loc = locations->GetTemp(0);
7708 vixl32::Register temp = RegisterFrom(temp_loc);
Artem Serov657022c2016-11-23 14:19:38 +00007709 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
7710 DCHECK_LE(num_temps, 3u);
7711 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
7712 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
7713 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
7714 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7715 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7716 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
7717 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
7718 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
7719 const uint32_t object_array_data_offset =
7720 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007721
Vladimir Marko87584542017-12-12 17:47:52 +00007722 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007723 SlowPathCodeARMVIXL* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007724 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARMVIXL(
7725 instruction, is_type_check_slow_path_fatal);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007726 codegen_->AddSlowPath(type_check_slow_path);
7727
7728 vixl32::Label done;
Anton Kirilov6f644202017-02-27 18:29:45 +00007729 vixl32::Label* final_label = codegen_->GetFinalLabel(instruction, &done);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007730 // Avoid null check if we know obj is not null.
7731 if (instruction->MustDoNullCheck()) {
Andreas Gampe3db70682018-12-26 15:12:03 -08007732 __ CompareAndBranchIfZero(obj, final_label, /* is_far_target= */ false);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007733 }
7734
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007735 switch (type_check_kind) {
7736 case TypeCheckKind::kExactCheck:
7737 case TypeCheckKind::kArrayCheck: {
Artem Serov657022c2016-11-23 14:19:38 +00007738 // /* HeapReference<Class> */ temp = obj->klass_
7739 GenerateReferenceLoadTwoRegisters(instruction,
7740 temp_loc,
7741 obj_loc,
7742 class_offset,
7743 maybe_temp2_loc,
7744 kWithoutReadBarrier);
7745
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007746 __ Cmp(temp, cls);
7747 // Jump to slow path for throwing the exception or doing a
7748 // more involved array check.
7749 __ B(ne, type_check_slow_path->GetEntryLabel());
7750 break;
7751 }
7752
7753 case TypeCheckKind::kAbstractClassCheck: {
Artem Serov657022c2016-11-23 14:19:38 +00007754 // /* HeapReference<Class> */ temp = obj->klass_
7755 GenerateReferenceLoadTwoRegisters(instruction,
7756 temp_loc,
7757 obj_loc,
7758 class_offset,
7759 maybe_temp2_loc,
7760 kWithoutReadBarrier);
7761
Artem Serovcfbe9132016-10-14 15:58:56 +01007762 // If the class is abstract, we eagerly fetch the super class of the
7763 // object to avoid doing a comparison we know will fail.
7764 vixl32::Label loop;
7765 __ Bind(&loop);
7766 // /* HeapReference<Class> */ temp = temp->super_class_
Artem Serov657022c2016-11-23 14:19:38 +00007767 GenerateReferenceLoadOneRegister(instruction,
7768 temp_loc,
7769 super_offset,
7770 maybe_temp2_loc,
7771 kWithoutReadBarrier);
Artem Serovcfbe9132016-10-14 15:58:56 +01007772
7773 // If the class reference currently in `temp` is null, jump to the slow path to throw the
7774 // exception.
xueliang.zhongf51bc622016-11-04 09:23:32 +00007775 __ CompareAndBranchIfZero(temp, type_check_slow_path->GetEntryLabel());
Artem Serovcfbe9132016-10-14 15:58:56 +01007776
7777 // Otherwise, compare the classes.
7778 __ Cmp(temp, cls);
Andreas Gampe3db70682018-12-26 15:12:03 -08007779 __ B(ne, &loop, /* is_far_target= */ false);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007780 break;
7781 }
7782
7783 case TypeCheckKind::kClassHierarchyCheck: {
Artem Serov657022c2016-11-23 14:19:38 +00007784 // /* HeapReference<Class> */ temp = obj->klass_
7785 GenerateReferenceLoadTwoRegisters(instruction,
7786 temp_loc,
7787 obj_loc,
7788 class_offset,
7789 maybe_temp2_loc,
7790 kWithoutReadBarrier);
7791
Artem Serovcfbe9132016-10-14 15:58:56 +01007792 // Walk over the class hierarchy to find a match.
7793 vixl32::Label loop;
7794 __ Bind(&loop);
7795 __ Cmp(temp, cls);
Andreas Gampe3db70682018-12-26 15:12:03 -08007796 __ B(eq, final_label, /* is_far_target= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007797
7798 // /* HeapReference<Class> */ temp = temp->super_class_
Artem Serov657022c2016-11-23 14:19:38 +00007799 GenerateReferenceLoadOneRegister(instruction,
7800 temp_loc,
7801 super_offset,
7802 maybe_temp2_loc,
7803 kWithoutReadBarrier);
Artem Serovcfbe9132016-10-14 15:58:56 +01007804
7805 // If the class reference currently in `temp` is null, jump to the slow path to throw the
7806 // exception.
xueliang.zhongf51bc622016-11-04 09:23:32 +00007807 __ CompareAndBranchIfZero(temp, type_check_slow_path->GetEntryLabel());
Artem Serovcfbe9132016-10-14 15:58:56 +01007808 // Otherwise, jump to the beginning of the loop.
7809 __ B(&loop);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007810 break;
7811 }
7812
Artem Serovcfbe9132016-10-14 15:58:56 +01007813 case TypeCheckKind::kArrayObjectCheck: {
Artem Serov657022c2016-11-23 14:19:38 +00007814 // /* HeapReference<Class> */ temp = obj->klass_
7815 GenerateReferenceLoadTwoRegisters(instruction,
7816 temp_loc,
7817 obj_loc,
7818 class_offset,
7819 maybe_temp2_loc,
7820 kWithoutReadBarrier);
7821
Artem Serovcfbe9132016-10-14 15:58:56 +01007822 // Do an exact check.
7823 __ Cmp(temp, cls);
Andreas Gampe3db70682018-12-26 15:12:03 -08007824 __ B(eq, final_label, /* is_far_target= */ false);
Artem Serovcfbe9132016-10-14 15:58:56 +01007825
7826 // Otherwise, we need to check that the object's class is a non-primitive array.
7827 // /* HeapReference<Class> */ temp = temp->component_type_
Artem Serov657022c2016-11-23 14:19:38 +00007828 GenerateReferenceLoadOneRegister(instruction,
7829 temp_loc,
7830 component_offset,
7831 maybe_temp2_loc,
7832 kWithoutReadBarrier);
Artem Serovcfbe9132016-10-14 15:58:56 +01007833 // If the component type is null, jump to the slow path to throw the exception.
xueliang.zhongf51bc622016-11-04 09:23:32 +00007834 __ CompareAndBranchIfZero(temp, type_check_slow_path->GetEntryLabel());
Artem Serovcfbe9132016-10-14 15:58:56 +01007835 // Otherwise, the object is indeed an array. Load the primitive type field of its component
7836 // type and branch to the slow path below if it is not kPrimNot, i.e. if it is a primitive type.
7837 GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007838 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
xueliang.zhongf51bc622016-11-04 09:23:32 +00007839 __ CompareAndBranchIfNonZero(temp, type_check_slow_path->GetEntryLabel());
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007840 break;
7841 }
7842
7843 case TypeCheckKind::kUnresolvedCheck:
Artem Serov657022c2016-11-23 14:19:38 +00007844 // We always go into the type check slow path for the unresolved check case.
Artem Serovcfbe9132016-10-14 15:58:56 +01007845 // We cannot directly call the CheckCast runtime entry point
7846 // without resorting to a type checking slow path here (i.e. by
7847 // calling InvokeRuntime directly), as it would require to
7848 // assign fixed registers for the inputs of this HInstanceOf
7849 // instruction (following the runtime calling convention), which
7850 // might be cluttered by the potential first read barrier
7851 // emission at the beginning of this method.
Artem Serov657022c2016-11-23 14:19:38 +00007852
Artem Serovcfbe9132016-10-14 15:58:56 +01007853 __ B(type_check_slow_path->GetEntryLabel());
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007854 break;
Artem Serov657022c2016-11-23 14:19:38 +00007855
7856 case TypeCheckKind::kInterfaceCheck: {
7857 // Avoid read barriers to improve performance of the fast path. We can not get false
7858 // positives by doing this.
7859 // /* HeapReference<Class> */ temp = obj->klass_
7860 GenerateReferenceLoadTwoRegisters(instruction,
7861 temp_loc,
7862 obj_loc,
7863 class_offset,
7864 maybe_temp2_loc,
7865 kWithoutReadBarrier);
7866
7867 // /* HeapReference<Class> */ temp = temp->iftable_
7868 GenerateReferenceLoadTwoRegisters(instruction,
7869 temp_loc,
7870 temp_loc,
7871 iftable_offset,
7872 maybe_temp2_loc,
7873 kWithoutReadBarrier);
7874 // Iftable is never null.
7875 __ Ldr(RegisterFrom(maybe_temp2_loc), MemOperand(temp, array_length_offset));
7876 // Loop through the iftable and check if any class matches.
7877 vixl32::Label start_loop;
7878 __ Bind(&start_loop);
7879 __ CompareAndBranchIfZero(RegisterFrom(maybe_temp2_loc),
7880 type_check_slow_path->GetEntryLabel());
7881 __ Ldr(RegisterFrom(maybe_temp3_loc), MemOperand(temp, object_array_data_offset));
7882 GetAssembler()->MaybeUnpoisonHeapReference(RegisterFrom(maybe_temp3_loc));
7883 // Go to next interface.
7884 __ Add(temp, temp, Operand::From(2 * kHeapReferenceSize));
7885 __ Sub(RegisterFrom(maybe_temp2_loc), RegisterFrom(maybe_temp2_loc), 2);
7886 // Compare the classes and continue the loop if they do not match.
7887 __ Cmp(cls, RegisterFrom(maybe_temp3_loc));
Andreas Gampe3db70682018-12-26 15:12:03 -08007888 __ B(ne, &start_loop, /* is_far_target= */ false);
Artem Serov657022c2016-11-23 14:19:38 +00007889 break;
7890 }
Vladimir Marko175e7862018-03-27 09:03:13 +00007891
7892 case TypeCheckKind::kBitstringCheck: {
7893 // /* HeapReference<Class> */ temp = obj->klass_
7894 GenerateReferenceLoadTwoRegisters(instruction,
7895 temp_loc,
7896 obj_loc,
7897 class_offset,
7898 maybe_temp2_loc,
7899 kWithoutReadBarrier);
7900
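      // Compare the type check bitstring read from the object's class (held in `temp`) against
      // the target class's expected bits in a single flag-setting comparison; the BNE below
      // routes any mismatch to the slow path.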
7901 GenerateBitstringTypeCheckCompare(instruction, temp, SetFlags);
7902 __ B(ne, type_check_slow_path->GetEntryLabel());
7903 break;
7904 }
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007905 }
Anton Kirilov6f644202017-02-27 18:29:45 +00007906 if (done.IsReferenced()) {
7907 __ Bind(&done);
7908 }
Anton Kirilove28d9ae2016-10-25 18:17:23 +01007909
7910 __ Bind(type_check_slow_path->GetExitLabel());
7911}
7912
Artem Serov551b28f2016-10-18 19:11:30 +01007913void LocationsBuilderARMVIXL::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007914 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
7915 instruction, LocationSummary::kCallOnMainOnly);
Artem Serov551b28f2016-10-18 19:11:30 +01007916 InvokeRuntimeCallingConventionARMVIXL calling_convention;
7917 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
7918}
7919
7920void InstructionCodeGeneratorARMVIXL::VisitMonitorOperation(HMonitorOperation* instruction) {
7921 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
7922 instruction,
7923 instruction->GetDexPc());
7924 if (instruction->IsEnter()) {
7925 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7926 } else {
7927 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7928 }
Andreas Gampe3db70682018-12-26 15:12:03 -08007929 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 18);
Artem Serov551b28f2016-10-18 19:11:30 +01007930}
7931
Artem Serov02109dd2016-09-23 17:17:54 +01007932void LocationsBuilderARMVIXL::VisitAnd(HAnd* instruction) {
7933 HandleBitwiseOperation(instruction, AND);
7934}
7935
7936void LocationsBuilderARMVIXL::VisitOr(HOr* instruction) {
7937 HandleBitwiseOperation(instruction, ORR);
7938}
7939
7940void LocationsBuilderARMVIXL::VisitXor(HXor* instruction) {
7941 HandleBitwiseOperation(instruction, EOR);
7942}
7943
7944void LocationsBuilderARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction, Opcode opcode) {
7945 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007946 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007947 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
7948 || instruction->GetResultType() == DataType::Type::kInt64);
Artem Serov02109dd2016-09-23 17:17:54 +01007949 // Note: GVN reorders commutative operations to have the constant on the right hand side.
7950 locations->SetInAt(0, Location::RequiresRegister());
7951 locations->SetInAt(1, ArmEncodableConstantOrRegister(instruction->InputAt(1), opcode));
7952 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7953}
7954
7955void InstructionCodeGeneratorARMVIXL::VisitAnd(HAnd* instruction) {
7956 HandleBitwiseOperation(instruction);
7957}
7958
7959void InstructionCodeGeneratorARMVIXL::VisitOr(HOr* instruction) {
7960 HandleBitwiseOperation(instruction);
7961}
7962
7963void InstructionCodeGeneratorARMVIXL::VisitXor(HXor* instruction) {
7964 HandleBitwiseOperation(instruction);
7965}
7966
Artem Serov2bbc9532016-10-21 11:51:50 +01007967void LocationsBuilderARMVIXL::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
7968 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007969 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007970 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
7971 || instruction->GetResultType() == DataType::Type::kInt64);
Artem Serov2bbc9532016-10-21 11:51:50 +01007972
7973 locations->SetInAt(0, Location::RequiresRegister());
7974 locations->SetInAt(1, Location::RequiresRegister());
7975 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7976}
7977
7978void InstructionCodeGeneratorARMVIXL::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
7979 LocationSummary* locations = instruction->GetLocations();
7980 Location first = locations->InAt(0);
7981 Location second = locations->InAt(1);
7982 Location out = locations->Out();
7983
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007984 if (instruction->GetResultType() == DataType::Type::kInt32) {
Artem Serov2bbc9532016-10-21 11:51:50 +01007985 vixl32::Register first_reg = RegisterFrom(first);
7986 vixl32::Register second_reg = RegisterFrom(second);
7987 vixl32::Register out_reg = RegisterFrom(out);
7988
7989 switch (instruction->GetOpKind()) {
7990 case HInstruction::kAnd:
7991 __ Bic(out_reg, first_reg, second_reg);
7992 break;
7993 case HInstruction::kOr:
7994 __ Orn(out_reg, first_reg, second_reg);
7995 break;
7996 // There is no EON on arm.
7997 case HInstruction::kXor:
7998 default:
7999 LOG(FATAL) << "Unexpected instruction " << instruction->DebugName();
8000 UNREACHABLE();
8001 }
8002 return;
8003
8004 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008005 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
Artem Serov2bbc9532016-10-21 11:51:50 +01008006 vixl32::Register first_low = LowRegisterFrom(first);
8007 vixl32::Register first_high = HighRegisterFrom(first);
8008 vixl32::Register second_low = LowRegisterFrom(second);
8009 vixl32::Register second_high = HighRegisterFrom(second);
8010 vixl32::Register out_low = LowRegisterFrom(out);
8011 vixl32::Register out_high = HighRegisterFrom(out);
8012
8013 switch (instruction->GetOpKind()) {
8014 case HInstruction::kAnd:
8015 __ Bic(out_low, first_low, second_low);
8016 __ Bic(out_high, first_high, second_high);
8017 break;
8018 case HInstruction::kOr:
8019 __ Orn(out_low, first_low, second_low);
8020 __ Orn(out_high, first_high, second_high);
8021 break;
8022 // There is no EON on arm.
8023 case HInstruction::kXor:
8024 default:
8025 LOG(FATAL) << "Unexpected instruction " << instruction->DebugName();
8026 UNREACHABLE();
8027 }
8028 }
8029}
8030
Anton Kirilov74234da2017-01-13 14:42:47 +00008031void LocationsBuilderARMVIXL::VisitDataProcWithShifterOp(
8032 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008033 DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
8034 instruction->GetType() == DataType::Type::kInt64);
Anton Kirilov74234da2017-01-13 14:42:47 +00008035 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008036 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008037 const bool overlap = instruction->GetType() == DataType::Type::kInt64 &&
Anton Kirilov74234da2017-01-13 14:42:47 +00008038 HDataProcWithShifterOp::IsExtensionOp(instruction->GetOpKind());
8039
8040 locations->SetInAt(0, Location::RequiresRegister());
8041 locations->SetInAt(1, Location::RequiresRegister());
8042 locations->SetOut(Location::RequiresRegister(),
8043 overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap);
8044}
8045
8046void InstructionCodeGeneratorARMVIXL::VisitDataProcWithShifterOp(
8047 HDataProcWithShifterOp* instruction) {
8048 const LocationSummary* const locations = instruction->GetLocations();
8049 const HInstruction::InstructionKind kind = instruction->GetInstrKind();
8050 const HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
8051
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008052 if (instruction->GetType() == DataType::Type::kInt32) {
Anton Kirilov420ee302017-02-21 18:10:26 +00008053 const vixl32::Register first = InputRegisterAt(instruction, 0);
8054 const vixl32::Register output = OutputRegister(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008055 const vixl32::Register second = instruction->InputAt(1)->GetType() == DataType::Type::kInt64
Anton Kirilov74234da2017-01-13 14:42:47 +00008056 ? LowRegisterFrom(locations->InAt(1))
8057 : InputRegisterAt(instruction, 1);
8058
Anton Kirilov420ee302017-02-21 18:10:26 +00008059 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
8060 DCHECK_EQ(kind, HInstruction::kAdd);
8061
8062 switch (op_kind) {
8063 case HDataProcWithShifterOp::kUXTB:
8064 __ Uxtab(output, first, second);
8065 break;
8066 case HDataProcWithShifterOp::kUXTH:
8067 __ Uxtah(output, first, second);
8068 break;
8069 case HDataProcWithShifterOp::kSXTB:
8070 __ Sxtab(output, first, second);
8071 break;
8072 case HDataProcWithShifterOp::kSXTH:
8073 __ Sxtah(output, first, second);
8074 break;
8075 default:
8076 LOG(FATAL) << "Unexpected operation kind: " << op_kind;
8077 UNREACHABLE();
8078 }
8079 } else {
8080 GenerateDataProcInstruction(kind,
8081 output,
8082 first,
8083 Operand(second,
8084 ShiftFromOpKind(op_kind),
8085 instruction->GetShiftAmount()),
8086 codegen_);
8087 }
Anton Kirilov74234da2017-01-13 14:42:47 +00008088 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008089 DCHECK_EQ(instruction->GetType(), DataType::Type::kInt64);
Anton Kirilov74234da2017-01-13 14:42:47 +00008090
8091 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
8092 const vixl32::Register second = InputRegisterAt(instruction, 1);
8093
8094 DCHECK(!LowRegisterFrom(locations->Out()).Is(second));
8095 GenerateDataProc(kind,
8096 locations->Out(),
8097 locations->InAt(0),
8098 second,
8099 Operand(second, ShiftType::ASR, 31),
8100 codegen_);
8101 } else {
8102 GenerateLongDataProc(instruction, codegen_);
8103 }
8104 }
8105}
8106
Artem Serov02109dd2016-09-23 17:17:54 +01008107// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
8108void InstructionCodeGeneratorARMVIXL::GenerateAndConst(vixl32::Register out,
8109 vixl32::Register first,
8110 uint32_t value) {
8111 // Optimize special cases for individual halfs of `and-long` (`and` is simplified earlier).
8112 if (value == 0xffffffffu) {
8113 if (!out.Is(first)) {
8114 __ Mov(out, first);
8115 }
8116 return;
8117 }
8118 if (value == 0u) {
8119 __ Mov(out, 0);
8120 return;
8121 }
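  // Prefer a plain AND when the immediate is encodable, otherwise try BIC with the inverted
  // immediate. The only remaining case handled below is a contiguous low-bit mask (value + 1 is
  // a power of two), which UBFX extracts directly.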
8122 if (GetAssembler()->ShifterOperandCanHold(AND, value)) {
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00008123 __ And(out, first, value);
8124 } else if (GetAssembler()->ShifterOperandCanHold(BIC, ~value)) {
8125 __ Bic(out, first, ~value);
Artem Serov02109dd2016-09-23 17:17:54 +01008126 } else {
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00008127 DCHECK(IsPowerOfTwo(value + 1));
8128 __ Ubfx(out, first, 0, WhichPowerOf2(value + 1));
Artem Serov02109dd2016-09-23 17:17:54 +01008129 }
8130}
8131
8132// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
8133void InstructionCodeGeneratorARMVIXL::GenerateOrrConst(vixl32::Register out,
8134 vixl32::Register first,
8135 uint32_t value) {
8136  // Optimize special cases for individual halves of `or-long` (`or` is simplified earlier).
8137 if (value == 0u) {
8138 if (!out.Is(first)) {
8139 __ Mov(out, first);
8140 }
8141 return;
8142 }
8143 if (value == 0xffffffffu) {
8144 __ Mvn(out, 0);
8145 return;
8146 }
8147 if (GetAssembler()->ShifterOperandCanHold(ORR, value)) {
8148 __ Orr(out, first, value);
8149 } else {
8150 DCHECK(GetAssembler()->ShifterOperandCanHold(ORN, ~value));
8151 __ Orn(out, first, ~value);
8152 }
8153}
8154
8155// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
8156void InstructionCodeGeneratorARMVIXL::GenerateEorConst(vixl32::Register out,
8157 vixl32::Register first,
8158 uint32_t value) {
8159  // Optimize special cases for individual halves of `xor-long` (`xor` is simplified earlier).
8160 if (value == 0u) {
8161 if (!out.Is(first)) {
8162 __ Mov(out, first);
8163 }
8164 return;
8165 }
8166 __ Eor(out, first, value);
8167}
8168
Anton Kirilovdda43962016-11-21 19:55:20 +00008169void InstructionCodeGeneratorARMVIXL::GenerateAddLongConst(Location out,
8170 Location first,
8171 uint64_t value) {
8172 vixl32::Register out_low = LowRegisterFrom(out);
8173 vixl32::Register out_high = HighRegisterFrom(out);
8174 vixl32::Register first_low = LowRegisterFrom(first);
8175 vixl32::Register first_high = HighRegisterFrom(first);
8176 uint32_t value_low = Low32Bits(value);
8177 uint32_t value_high = High32Bits(value);
8178 if (value_low == 0u) {
8179 if (!out_low.Is(first_low)) {
8180 __ Mov(out_low, first_low);
8181 }
8182 __ Add(out_high, first_high, value_high);
8183 return;
8184 }
8185 __ Adds(out_low, first_low, value_low);
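  // SBC computes rn - imm - !carry, i.e. rn + ~imm + carry, so when ADC cannot encode
  // value_high we can still add it (plus the carry from the low word) via SBC with the
  // bitwise-complemented immediate. For example, value_high = 0x7fffffff is not a valid
  // modified immediate, but ~value_high = 0x80000000 is, so SBC with 0x80000000 is emitted.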
Vladimir Markof0a6a1d2018-01-08 14:23:56 +00008186 if (GetAssembler()->ShifterOperandCanHold(ADC, value_high)) {
Anton Kirilovdda43962016-11-21 19:55:20 +00008187 __ Adc(out_high, first_high, value_high);
Anton Kirilovdda43962016-11-21 19:55:20 +00008188 } else {
Vladimir Markof0a6a1d2018-01-08 14:23:56 +00008189 DCHECK(GetAssembler()->ShifterOperandCanHold(SBC, ~value_high));
8190 __ Sbc(out_high, first_high, ~value_high);
Anton Kirilovdda43962016-11-21 19:55:20 +00008191 }
8192}
8193
Artem Serov02109dd2016-09-23 17:17:54 +01008194void InstructionCodeGeneratorARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction) {
8195 LocationSummary* locations = instruction->GetLocations();
8196 Location first = locations->InAt(0);
8197 Location second = locations->InAt(1);
8198 Location out = locations->Out();
8199
8200 if (second.IsConstant()) {
8201 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
8202 uint32_t value_low = Low32Bits(value);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008203 if (instruction->GetResultType() == DataType::Type::kInt32) {
Artem Serov02109dd2016-09-23 17:17:54 +01008204 vixl32::Register first_reg = InputRegisterAt(instruction, 0);
8205 vixl32::Register out_reg = OutputRegister(instruction);
8206 if (instruction->IsAnd()) {
8207 GenerateAndConst(out_reg, first_reg, value_low);
8208 } else if (instruction->IsOr()) {
8209 GenerateOrrConst(out_reg, first_reg, value_low);
8210 } else {
8211 DCHECK(instruction->IsXor());
8212 GenerateEorConst(out_reg, first_reg, value_low);
8213 }
8214 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008215 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
Artem Serov02109dd2016-09-23 17:17:54 +01008216 uint32_t value_high = High32Bits(value);
8217 vixl32::Register first_low = LowRegisterFrom(first);
8218 vixl32::Register first_high = HighRegisterFrom(first);
8219 vixl32::Register out_low = LowRegisterFrom(out);
8220 vixl32::Register out_high = HighRegisterFrom(out);
8221 if (instruction->IsAnd()) {
8222 GenerateAndConst(out_low, first_low, value_low);
8223 GenerateAndConst(out_high, first_high, value_high);
8224 } else if (instruction->IsOr()) {
8225 GenerateOrrConst(out_low, first_low, value_low);
8226 GenerateOrrConst(out_high, first_high, value_high);
8227 } else {
8228 DCHECK(instruction->IsXor());
8229 GenerateEorConst(out_low, first_low, value_low);
8230 GenerateEorConst(out_high, first_high, value_high);
8231 }
8232 }
8233 return;
8234 }
8235
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008236 if (instruction->GetResultType() == DataType::Type::kInt32) {
Artem Serov02109dd2016-09-23 17:17:54 +01008237 vixl32::Register first_reg = InputRegisterAt(instruction, 0);
8238 vixl32::Register second_reg = InputRegisterAt(instruction, 1);
8239 vixl32::Register out_reg = OutputRegister(instruction);
8240 if (instruction->IsAnd()) {
8241 __ And(out_reg, first_reg, second_reg);
8242 } else if (instruction->IsOr()) {
8243 __ Orr(out_reg, first_reg, second_reg);
8244 } else {
8245 DCHECK(instruction->IsXor());
8246 __ Eor(out_reg, first_reg, second_reg);
8247 }
8248 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008249 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
Artem Serov02109dd2016-09-23 17:17:54 +01008250 vixl32::Register first_low = LowRegisterFrom(first);
8251 vixl32::Register first_high = HighRegisterFrom(first);
8252 vixl32::Register second_low = LowRegisterFrom(second);
8253 vixl32::Register second_high = HighRegisterFrom(second);
8254 vixl32::Register out_low = LowRegisterFrom(out);
8255 vixl32::Register out_high = HighRegisterFrom(out);
8256 if (instruction->IsAnd()) {
8257 __ And(out_low, first_low, second_low);
8258 __ And(out_high, first_high, second_high);
8259 } else if (instruction->IsOr()) {
8260 __ Orr(out_low, first_low, second_low);
8261 __ Orr(out_high, first_high, second_high);
8262 } else {
8263 DCHECK(instruction->IsXor());
8264 __ Eor(out_low, first_low, second_low);
8265 __ Eor(out_high, first_high, second_high);
8266 }
8267 }
8268}
8269
Artem Serovcfbe9132016-10-14 15:58:56 +01008270void InstructionCodeGeneratorARMVIXL::GenerateReferenceLoadOneRegister(
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008271 HInstruction* instruction,
Artem Serovcfbe9132016-10-14 15:58:56 +01008272 Location out,
8273 uint32_t offset,
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008274 Location maybe_temp,
8275 ReadBarrierOption read_barrier_option) {
Artem Serovcfbe9132016-10-14 15:58:56 +01008276 vixl32::Register out_reg = RegisterFrom(out);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008277 if (read_barrier_option == kWithReadBarrier) {
8278 CHECK(kEmitCompilerReadBarrier);
8279 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
8280 if (kUseBakerReadBarrier) {
8281 // Load with fast path based Baker's read barrier.
8282 // /* HeapReference<Object> */ out = *(out + offset)
8283 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08008284 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check= */ false);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008285 } else {
8286 // Load with slow path based read barrier.
8287 // Save the value of `out` into `maybe_temp` before overwriting it
8288 // in the following move operation, as we will need it for the
8289 // read barrier below.
8290 __ Mov(RegisterFrom(maybe_temp), out_reg);
8291 // /* HeapReference<Object> */ out = *(out + offset)
8292 GetAssembler()->LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
8293 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
8294 }
Artem Serovcfbe9132016-10-14 15:58:56 +01008295 } else {
8296 // Plain load with no read barrier.
8297 // /* HeapReference<Object> */ out = *(out + offset)
8298 GetAssembler()->LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
8299 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
8300 }
8301}
8302
Anton Kirilove28d9ae2016-10-25 18:17:23 +01008303void InstructionCodeGeneratorARMVIXL::GenerateReferenceLoadTwoRegisters(
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008304 HInstruction* instruction,
Anton Kirilove28d9ae2016-10-25 18:17:23 +01008305 Location out,
8306 Location obj,
8307 uint32_t offset,
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008308 Location maybe_temp,
8309 ReadBarrierOption read_barrier_option) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01008310 vixl32::Register out_reg = RegisterFrom(out);
8311 vixl32::Register obj_reg = RegisterFrom(obj);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008312 if (read_barrier_option == kWithReadBarrier) {
8313 CHECK(kEmitCompilerReadBarrier);
8314 if (kUseBakerReadBarrier) {
8315 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
8316 // Load with fast path based Baker's read barrier.
8317 // /* HeapReference<Object> */ out = *(obj + offset)
8318 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08008319 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check= */ false);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008320 } else {
8321 // Load with slow path based read barrier.
8322 // /* HeapReference<Object> */ out = *(obj + offset)
8323 GetAssembler()->LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
8324 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
8325 }
Anton Kirilove28d9ae2016-10-25 18:17:23 +01008326 } else {
8327 // Plain load with no read barrier.
8328 // /* HeapReference<Object> */ out = *(obj + offset)
8329 GetAssembler()->LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
8330 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
8331 }
8332}
8333
Vladimir Markoca1e0382018-04-11 09:58:41 +00008334void CodeGeneratorARMVIXL::GenerateGcRootFieldLoad(
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008335 HInstruction* instruction,
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008336 Location root,
8337 vixl32::Register obj,
8338 uint32_t offset,
Artem Serovd4cc5b22016-11-04 11:19:09 +00008339 ReadBarrierOption read_barrier_option) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008340 vixl32::Register root_reg = RegisterFrom(root);
Artem Serovd4cc5b22016-11-04 11:19:09 +00008341 if (read_barrier_option == kWithReadBarrier) {
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008342 DCHECK(kEmitCompilerReadBarrier);
8343 if (kUseBakerReadBarrier) {
8344 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillainba650a42017-03-06 13:52:32 +00008345      // Baker's read barriers are used.
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008346
Vladimir Marko008e09f32018-08-06 15:42:43 +01008347 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
8348 // the Marking Register) to decide whether we need to enter
8349 // the slow path to mark the GC root.
8350 //
8351 // We use shared thunks for the slow path; shared within the method
8352 // for JIT, across methods for AOT. That thunk checks the reference
8353 // and jumps to the entrypoint if needed.
8354 //
8355 // lr = &return_address;
8356 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
8357 // if (mr) { // Thread::Current()->GetIsGcMarking()
8358 // goto gc_root_thunk<root_reg>(lr)
8359 // }
8360 // return_address:
Roland Levillainba650a42017-03-06 13:52:32 +00008361
Vladimir Marko008e09f32018-08-06 15:42:43 +01008362 UseScratchRegisterScope temps(GetVIXLAssembler());
8363 temps.Exclude(ip);
8364 bool narrow = CanEmitNarrowLdr(root_reg, obj, offset);
8365 uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode(), narrow);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008366
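      // The sequence below must have a precisely known layout, so compute its exact byte size
      // for the ExactAssemblyScope: CMP against a low marking register and a narrow LDR each use
      // a 16-bit Thumb encoding; the remaining instructions are 32-bit.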
Vladimir Markod887ed82018-08-14 13:52:12 +00008367 size_t narrow_instructions = /* CMP */ (mr.IsLow() ? 1u : 0u) + /* LDR */ (narrow ? 1u : 0u);
8368 size_t wide_instructions = /* ADR+CMP+LDR+BNE */ 4u - narrow_instructions;
8369 size_t exact_size = wide_instructions * vixl32::k32BitT32InstructionSizeInBytes +
8370 narrow_instructions * vixl32::k16BitT32InstructionSizeInBytes;
8371 ExactAssemblyScope guard(GetVIXLAssembler(), exact_size);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008372 vixl32::Label return_address;
8373 EmitAdrCode adr(GetVIXLAssembler(), lr, &return_address);
8374 __ cmp(mr, Operand(0));
8375 // Currently the offset is always within range. If that changes,
8376 // we shall have to split the load the same way as for fields.
8377 DCHECK_LT(offset, kReferenceLoadMinFarOffset);
8378 ptrdiff_t old_offset = GetVIXLAssembler()->GetBuffer()->GetCursorOffset();
8379 __ ldr(EncodingSize(narrow ? Narrow : Wide), root_reg, MemOperand(obj, offset));
8380 EmitBakerReadBarrierBne(custom_data);
Vladimir Markod887ed82018-08-14 13:52:12 +00008381 __ bind(&return_address);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008382 DCHECK_EQ(old_offset - GetVIXLAssembler()->GetBuffer()->GetCursorOffset(),
8383 narrow ? BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_NARROW_OFFSET
8384 : BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_WIDE_OFFSET);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008385 } else {
8386 // GC root loaded through a slow path for read barriers other
8387 // than Baker's.
8388 // /* GcRoot<mirror::Object>* */ root = obj + offset
8389 __ Add(root_reg, obj, offset);
8390 // /* mirror::Object* */ root = root->Read()
Vladimir Markoca1e0382018-04-11 09:58:41 +00008391 GenerateReadBarrierForRootSlow(instruction, root, root);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008392 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008393 } else {
8394 // Plain GC root load with no read barrier.
8395 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
8396 GetAssembler()->LoadFromOffset(kLoadWord, root_reg, obj, offset);
8397 // Note that GC roots are not affected by heap poisoning, thus we
8398 // do not have to unpoison `root_reg` here.
8399 }
Andreas Gampe3db70682018-12-26 15:12:03 -08008400 MaybeGenerateMarkingRegisterCheck(/* code= */ 19);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008401}
8402
Vladimir Markod887ed82018-08-14 13:52:12 +00008403void CodeGeneratorARMVIXL::GenerateUnsafeCasOldValueAddWithBakerReadBarrier(
8404 vixl::aarch32::Register old_value,
8405 vixl::aarch32::Register adjusted_old_value,
8406 vixl::aarch32::Register expected) {
8407 DCHECK(kEmitCompilerReadBarrier);
8408 DCHECK(kUseBakerReadBarrier);
8409
8410 // Similar to the Baker RB path in GenerateGcRootFieldLoad(), with an ADD instead of LDR.
8411 uint32_t custom_data = EncodeBakerReadBarrierUnsafeCasData(old_value.GetCode());
8412
8413 size_t narrow_instructions = /* CMP */ (mr.IsLow() ? 1u : 0u);
8414 size_t wide_instructions = /* ADR+CMP+ADD+BNE */ 4u - narrow_instructions;
8415 size_t exact_size = wide_instructions * vixl32::k32BitT32InstructionSizeInBytes +
8416 narrow_instructions * vixl32::k16BitT32InstructionSizeInBytes;
8417 ExactAssemblyScope guard(GetVIXLAssembler(), exact_size);
8418 vixl32::Label return_address;
8419 EmitAdrCode adr(GetVIXLAssembler(), lr, &return_address);
8420 __ cmp(mr, Operand(0));
8421 ptrdiff_t old_offset = GetVIXLAssembler()->GetBuffer()->GetCursorOffset();
8422 __ add(EncodingSize(Wide), old_value, adjusted_old_value, Operand(expected)); // Preserves flags.
8423 EmitBakerReadBarrierBne(custom_data);
8424 __ bind(&return_address);
8425 DCHECK_EQ(old_offset - GetVIXLAssembler()->GetBuffer()->GetCursorOffset(),
8426 BAKER_MARK_INTROSPECTION_UNSAFE_CAS_ADD_OFFSET);
8427}
8428
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008429void CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
8430 Location ref,
8431 vixl32::Register obj,
Vladimir Marko248141f2018-08-10 10:40:07 +01008432 const vixl32::MemOperand& src,
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008433 bool needs_null_check) {
8434 DCHECK(kEmitCompilerReadBarrier);
8435 DCHECK(kUseBakerReadBarrier);
8436
Vladimir Marko008e09f32018-08-06 15:42:43 +01008437 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
8438 // Marking Register) to decide whether we need to enter the slow
8439 // path to mark the reference. Then, in the slow path, check the
8440 // gray bit in the lock word of the reference's holder (`obj`) to
8441 // decide whether to mark `ref` or not.
8442 //
8443 // We use shared thunks for the slow path; shared within the method
8444 // for JIT, across methods for AOT. That thunk checks the holder
8445 // and jumps to the entrypoint if needed. If the holder is not gray,
8446 // it creates a fake dependency and returns to the LDR instruction.
8447 //
8448 // lr = &gray_return_address;
8449 // if (mr) { // Thread::Current()->GetIsGcMarking()
8450 // goto field_thunk<holder_reg, base_reg>(lr)
8451 // }
8452 // not_gray_return_address:
8453 // // Original reference load. If the offset is too large to fit
8454 // // into LDR, we use an adjusted base register here.
8455 // HeapReference<mirror::Object> reference = *(obj+offset);
8456 // gray_return_address:
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008457
Vladimir Marko248141f2018-08-10 10:40:07 +01008458 DCHECK(src.GetAddrMode() == vixl32::Offset);
8459 DCHECK_ALIGNED(src.GetOffsetImmediate(), sizeof(mirror::HeapReference<mirror::Object>));
Vladimir Marko008e09f32018-08-06 15:42:43 +01008460 vixl32::Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
Vladimir Marko248141f2018-08-10 10:40:07 +01008461 bool narrow = CanEmitNarrowLdr(ref_reg, src.GetBaseRegister(), src.GetOffsetImmediate());
8462
Vladimir Marko008e09f32018-08-06 15:42:43 +01008463 UseScratchRegisterScope temps(GetVIXLAssembler());
8464 temps.Exclude(ip);
Vladimir Marko248141f2018-08-10 10:40:07 +01008465 uint32_t custom_data =
8466 EncodeBakerReadBarrierFieldData(src.GetBaseRegister().GetCode(), obj.GetCode(), narrow);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008467
Vladimir Marko008e09f32018-08-06 15:42:43 +01008468 {
Vladimir Markod887ed82018-08-14 13:52:12 +00008469 size_t narrow_instructions =
8470 /* CMP */ (mr.IsLow() ? 1u : 0u) +
8471 /* LDR+unpoison? */ (narrow ? (kPoisonHeapReferences ? 2u : 1u) : 0u);
8472 size_t wide_instructions =
8473 /* ADR+CMP+LDR+BNE+unpoison? */ (kPoisonHeapReferences ? 5u : 4u) - narrow_instructions;
8474 size_t exact_size = wide_instructions * vixl32::k32BitT32InstructionSizeInBytes +
8475 narrow_instructions * vixl32::k16BitT32InstructionSizeInBytes;
8476 ExactAssemblyScope guard(GetVIXLAssembler(), exact_size);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008477 vixl32::Label return_address;
8478 EmitAdrCode adr(GetVIXLAssembler(), lr, &return_address);
8479 __ cmp(mr, Operand(0));
8480 EmitBakerReadBarrierBne(custom_data);
8481 ptrdiff_t old_offset = GetVIXLAssembler()->GetBuffer()->GetCursorOffset();
Vladimir Marko248141f2018-08-10 10:40:07 +01008482 __ ldr(EncodingSize(narrow ? Narrow : Wide), ref_reg, src);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008483 if (needs_null_check) {
8484 MaybeRecordImplicitNullCheck(instruction);
8485 }
8486 // Note: We need a specific width for the unpoisoning NEG.
8487 if (kPoisonHeapReferences) {
8488 if (narrow) {
8489 // The only 16-bit encoding is T1 which sets flags outside IT block (i.e. RSBS, not RSB).
8490 __ rsbs(EncodingSize(Narrow), ref_reg, ref_reg, Operand(0));
8491 } else {
8492 __ rsb(EncodingSize(Wide), ref_reg, ref_reg, Operand(0));
8493 }
8494 }
Vladimir Markod887ed82018-08-14 13:52:12 +00008495 __ bind(&return_address);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008496 DCHECK_EQ(old_offset - GetVIXLAssembler()->GetBuffer()->GetCursorOffset(),
8497 narrow ? BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_OFFSET
8498 : BAKER_MARK_INTROSPECTION_FIELD_LDR_WIDE_OFFSET);
8499 }
Andreas Gampe3db70682018-12-26 15:12:03 -08008500 MaybeGenerateMarkingRegisterCheck(/* code= */ 20, /* temp_loc= */ LocationFrom(ip));
Roland Levillain6070e882016-11-03 17:51:58 +00008501}
8502
Vladimir Marko248141f2018-08-10 10:40:07 +01008503void CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
8504 Location ref,
8505 vixl32::Register obj,
8506 uint32_t offset,
8507 Location temp,
8508 bool needs_null_check) {
8509 DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
8510 vixl32::Register base = obj;
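  // When the offset is too large for the LDR emitted (and later decoded) by the Baker read
  // barrier code below, split it: pre-add the aligned upper bits of the offset into the temp
  // register and load with the remaining low bits.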
8511 if (offset >= kReferenceLoadMinFarOffset) {
8512 base = RegisterFrom(temp);
8513 static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
8514 __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
8515 offset &= (kReferenceLoadMinFarOffset - 1u);
8516 }
8517 GenerateFieldLoadWithBakerReadBarrier(
8518 instruction, ref, obj, MemOperand(base, offset), needs_null_check);
8519}
8520
Vladimir Marko008e09f32018-08-06 15:42:43 +01008521void CodeGeneratorARMVIXL::GenerateArrayLoadWithBakerReadBarrier(Location ref,
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008522 vixl32::Register obj,
8523 uint32_t data_offset,
8524 Location index,
8525 Location temp,
8526 bool needs_null_check) {
8527 DCHECK(kEmitCompilerReadBarrier);
8528 DCHECK(kUseBakerReadBarrier);
8529
8530 static_assert(
8531 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
8532 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008533 ScaleFactor scale_factor = TIMES_4;
8534
Vladimir Marko008e09f32018-08-06 15:42:43 +01008535 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
8536 // Marking Register) to decide whether we need to enter the slow
8537 // path to mark the reference. Then, in the slow path, check the
8538 // gray bit in the lock word of the reference's holder (`obj`) to
8539 // decide whether to mark `ref` or not.
8540 //
8541 // We use shared thunks for the slow path; shared within the method
8542 // for JIT, across methods for AOT. That thunk checks the holder
8543 // and jumps to the entrypoint if needed. If the holder is not gray,
8544 // it creates a fake dependency and returns to the LDR instruction.
8545 //
8546 // lr = &gray_return_address;
8547 // if (mr) { // Thread::Current()->GetIsGcMarking()
8548 // goto array_thunk<base_reg>(lr)
8549 // }
8550 // not_gray_return_address:
8551 // // Original reference load. If the offset is too large to fit
8552 // // into LDR, we use an adjusted base register here.
8553 // HeapReference<mirror::Object> reference = data[index];
8554 // gray_return_address:
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008555
Vladimir Marko008e09f32018-08-06 15:42:43 +01008556 DCHECK(index.IsValid());
8557 vixl32::Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
8558 vixl32::Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
8559 vixl32::Register data_reg = RegisterFrom(temp, DataType::Type::kInt32); // Raw pointer.
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008560
Vladimir Marko008e09f32018-08-06 15:42:43 +01008561 UseScratchRegisterScope temps(GetVIXLAssembler());
8562 temps.Exclude(ip);
8563 uint32_t custom_data = EncodeBakerReadBarrierArrayData(data_reg.GetCode());
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008564
Vladimir Marko008e09f32018-08-06 15:42:43 +01008565 __ Add(data_reg, obj, Operand(data_offset));
8566 {
Vladimir Markod887ed82018-08-14 13:52:12 +00008567 size_t narrow_instructions = /* CMP */ (mr.IsLow() ? 1u : 0u);
8568 size_t wide_instructions =
8569 /* ADR+CMP+BNE+LDR+unpoison? */ (kPoisonHeapReferences ? 5u : 4u) - narrow_instructions;
8570 size_t exact_size = wide_instructions * vixl32::k32BitT32InstructionSizeInBytes +
8571 narrow_instructions * vixl32::k16BitT32InstructionSizeInBytes;
8572 ExactAssemblyScope guard(GetVIXLAssembler(), exact_size);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008573 vixl32::Label return_address;
8574 EmitAdrCode adr(GetVIXLAssembler(), lr, &return_address);
8575 __ cmp(mr, Operand(0));
8576 EmitBakerReadBarrierBne(custom_data);
8577 ptrdiff_t old_offset = GetVIXLAssembler()->GetBuffer()->GetCursorOffset();
8578 __ ldr(ref_reg, MemOperand(data_reg, index_reg, vixl32::LSL, scale_factor));
8579 DCHECK(!needs_null_check); // The thunk cannot handle the null check.
8580 // Note: We need a Wide NEG for the unpoisoning.
8581 if (kPoisonHeapReferences) {
8582 __ rsb(EncodingSize(Wide), ref_reg, ref_reg, Operand(0));
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008583 }
Vladimir Markod887ed82018-08-14 13:52:12 +00008584 __ bind(&return_address);
Vladimir Marko008e09f32018-08-06 15:42:43 +01008585 DCHECK_EQ(old_offset - GetVIXLAssembler()->GetBuffer()->GetCursorOffset(),
8586 BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008587 }
Andreas Gampe3db70682018-12-26 15:12:03 -08008588 MaybeGenerateMarkingRegisterCheck(/* code= */ 21, /* temp_loc= */ LocationFrom(ip));
Roland Levillain6070e882016-11-03 17:51:58 +00008589}
8590
Roland Levillain5daa4952017-07-03 17:23:56 +01008591void CodeGeneratorARMVIXL::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
8592 // The following condition is a compile-time one, so it does not have a run-time cost.
8593 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
8594 // The following condition is a run-time one; it is executed after the
8595 // previous compile-time test, to avoid penalizing non-debug builds.
8596 if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
8597 UseScratchRegisterScope temps(GetVIXLAssembler());
8598 vixl32::Register temp = temp_loc.IsValid() ? RegisterFrom(temp_loc) : temps.Acquire();
8599 GetAssembler()->GenerateMarkingRegisterCheck(temp,
8600 kMarkingRegisterCheckBreakCodeBaseCode + code);
8601 }
8602 }
8603}
8604
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008605void CodeGeneratorARMVIXL::GenerateReadBarrierSlow(HInstruction* instruction,
8606 Location out,
8607 Location ref,
8608 Location obj,
8609 uint32_t offset,
8610 Location index) {
8611 DCHECK(kEmitCompilerReadBarrier);
8612
8613 // Insert a slow path based read barrier *after* the reference load.
8614 //
8615 // If heap poisoning is enabled, the unpoisoning of the loaded
8616 // reference will be carried out by the runtime within the slow
8617 // path.
8618 //
8619 // Note that `ref` currently does not get unpoisoned (when heap
8620 // poisoning is enabled), which is alright as the `ref` argument is
8621 // not used by the artReadBarrierSlow entry point.
8622 //
8623 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01008624 SlowPathCodeARMVIXL* slow_path = new (GetScopedAllocator())
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008625 ReadBarrierForHeapReferenceSlowPathARMVIXL(instruction, out, ref, obj, offset, index);
8626 AddSlowPath(slow_path);
8627
8628 __ B(slow_path->GetEntryLabel());
8629 __ Bind(slow_path->GetExitLabel());
8630}
8631
8632void CodeGeneratorARMVIXL::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
Artem Serov02d37832016-10-25 15:25:33 +01008633 Location out,
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008634 Location ref,
8635 Location obj,
8636 uint32_t offset,
8637 Location index) {
Artem Serov02d37832016-10-25 15:25:33 +01008638 if (kEmitCompilerReadBarrier) {
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008639 // Baker's read barriers shall be handled by the fast path
Roland Levillain9983e302017-07-14 14:34:22 +01008640 // (CodeGeneratorARMVIXL::GenerateReferenceLoadWithBakerReadBarrier).
Artem Serov02d37832016-10-25 15:25:33 +01008641 DCHECK(!kUseBakerReadBarrier);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008642 // If heap poisoning is enabled, unpoisoning will be taken care of
8643 // by the runtime within the slow path.
8644 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Artem Serov02d37832016-10-25 15:25:33 +01008645 } else if (kPoisonHeapReferences) {
8646 GetAssembler()->UnpoisonHeapReference(RegisterFrom(out));
8647 }
8648}
8649
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008650void CodeGeneratorARMVIXL::GenerateReadBarrierForRootSlow(HInstruction* instruction,
8651 Location out,
8652 Location root) {
8653 DCHECK(kEmitCompilerReadBarrier);
8654
8655 // Insert a slow path based read barrier *after* the GC root load.
8656 //
8657 // Note that GC roots are not affected by heap poisoning, so we do
8658 // not need to do anything special for this here.
8659 SlowPathCodeARMVIXL* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01008660 new (GetScopedAllocator()) ReadBarrierForRootSlowPathARMVIXL(instruction, out, root);
Anton Kirilovedb2ac32016-11-30 15:14:10 +00008661 AddSlowPath(slow_path);
8662
8663 __ B(slow_path->GetEntryLabel());
8664 __ Bind(slow_path->GetExitLabel());
8665}
8666
Artem Serov02d37832016-10-25 15:25:33 +01008667// Check if the desired_dispatch_info is supported. If it is, return it,
8668// otherwise return a fall-back info that should be used instead.
8669HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARMVIXL::GetSupportedInvokeStaticOrDirectDispatch(
Artem Serovd4cc5b22016-11-04 11:19:09 +00008670 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffraybdb2ecc2018-09-18 14:33:55 +01008671 ArtMethod* method ATTRIBUTE_UNUSED) {
Nicolas Geoffraye807ff72017-01-23 09:03:12 +00008672 return desired_dispatch_info;
Artem Serov02d37832016-10-25 15:25:33 +01008673}
8674
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008675vixl32::Register CodeGeneratorARMVIXL::GetInvokeStaticOrDirectExtraParameter(
8676 HInvokeStaticOrDirect* invoke, vixl32::Register temp) {
8677 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
8678 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
8679 if (!invoke->GetLocations()->Intrinsified()) {
8680 return RegisterFrom(location);
8681 }
8682 // For intrinsics we allow any location, so it may be on the stack.
8683 if (!location.IsRegister()) {
8684 GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, location.GetStackIndex());
8685 return temp;
8686 }
8687 // For register locations, check if the register was saved. If so, get it from the stack.
8688 // Note: There is a chance that the register was saved but not overwritten, so we could
8689 // save one load. However, since this is just an intrinsic slow path we prefer this
8690 // simple and more robust approach rather that trying to determine if that's the case.
8691 SlowPathCode* slow_path = GetCurrentSlowPath();
Scott Wakelingd5cd4972017-02-03 11:38:35 +00008692 if (slow_path != nullptr && slow_path->IsCoreRegisterSaved(RegisterFrom(location).GetCode())) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008693 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(RegisterFrom(location).GetCode());
8694 GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, stack_offset);
8695 return temp;
8696 }
8697 return RegisterFrom(location);
8698}
8699
Vladimir Markod254f5c2017-06-02 15:18:36 +00008700void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
Vladimir Markoe7197bf2017-06-02 17:00:23 +01008701 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00008702 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008703 switch (invoke->GetMethodLoadKind()) {
8704 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
8705 uint32_t offset =
8706 GetThreadOffset<kArmPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
8707 // temp = thread->string_init_entrypoint
Artem Serovd4cc5b22016-11-04 11:19:09 +00008708 GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(temp), tr, offset);
8709 break;
8710 }
8711 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
8712 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
8713 break;
Vladimir Marko65979462017-05-19 17:25:12 +01008714 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
8715 DCHECK(GetCompilerOptions().IsBootImage());
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008716 PcRelativePatchInfo* labels = NewBootImageMethodPatch(invoke->GetTargetMethod());
Vladimir Marko65979462017-05-19 17:25:12 +01008717 vixl32::Register temp_reg = RegisterFrom(temp);
8718 EmitMovwMovtPlaceholder(labels, temp_reg);
8719 break;
8720 }
Vladimir Markob066d432018-01-03 13:14:37 +00008721 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
Vladimir Markoe47f60c2018-02-21 13:43:28 +00008722 uint32_t boot_image_offset = GetBootImageOffset(invoke);
Vladimir Markob066d432018-01-03 13:14:37 +00008723 PcRelativePatchInfo* labels = NewBootImageRelRoPatch(boot_image_offset);
8724 vixl32::Register temp_reg = RegisterFrom(temp);
8725 EmitMovwMovtPlaceholder(labels, temp_reg);
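      // temp_reg now points at the .data.bimg.rel.ro entry for this method; the entry is fixed
      // up at load time to hold the boot image ArtMethod*, so load the method pointer through it.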
8726      GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, temp_reg, /* offset= */ 0);
8727 break;
8728 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01008729 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
8730 PcRelativePatchInfo* labels = NewMethodBssEntryPatch(
8731 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
8732 vixl32::Register temp_reg = RegisterFrom(temp);
8733 EmitMovwMovtPlaceholder(labels, temp_reg);
8734      GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, temp_reg, /* offset= */ 0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008735 break;
8736 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01008737 case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
8738 __ Mov(RegisterFrom(temp), Operand::From(invoke->GetMethodAddress()));
8739 break;
Vladimir Markoe7197bf2017-06-02 17:00:23 +01008740 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
8741 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
8742 return; // No code pointer retrieval; the runtime performs the call directly.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008743 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008744 }
8745
Artem Serovd4cc5b22016-11-04 11:19:09 +00008746 switch (invoke->GetCodePtrLocation()) {
8747 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Vladimir Markoe7197bf2017-06-02 17:00:23 +01008748 {
8749 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
8750 ExactAssemblyScope aas(GetVIXLAssembler(),
8751 vixl32::k32BitT32InstructionSizeInBytes,
8752 CodeBufferCheckScope::kMaximumSize);
8753 __ bl(GetFrameEntryLabel());
8754 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
8755 }
Artem Serovd4cc5b22016-11-04 11:19:09 +00008756 break;
Artem Serovd4cc5b22016-11-04 11:19:09 +00008757 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
8758 // LR = callee_method->entry_point_from_quick_compiled_code_
8759 GetAssembler()->LoadFromOffset(
8760 kLoadWord,
8761 lr,
8762 RegisterFrom(callee_method),
8763 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value());
Alexandre Rames374ddf32016-11-04 10:40:49 +00008764 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01008765 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Alexandre Rames374ddf32016-11-04 10:40:49 +00008766 // blx in T32 has only 16bit encoding that's why a stricter check for the scope is used.
Artem Serov0fb37192016-12-06 18:13:40 +00008767 ExactAssemblyScope aas(GetVIXLAssembler(),
8768 vixl32::k16BitT32InstructionSizeInBytes,
8769 CodeBufferCheckScope::kExactSize);
Alexandre Rames374ddf32016-11-04 10:40:49 +00008770 // LR()
8771 __ blx(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01008772 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Alexandre Rames374ddf32016-11-04 10:40:49 +00008773 }
Artem Serovd4cc5b22016-11-04 11:19:09 +00008774 break;
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008775 }
8776
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008777 DCHECK(!IsLeafMethod());
8778}
8779
Vladimir Markoe7197bf2017-06-02 17:00:23 +01008780void CodeGeneratorARMVIXL::GenerateVirtualCall(
8781 HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008782 vixl32::Register temp = RegisterFrom(temp_location);
8783 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
8784 invoke->GetVTableIndex(), kArmPointerSize).Uint32Value();
8785
8786 // Use the calling convention instead of the location of the receiver, as
8787 // intrinsics may have put the receiver in a different register. In the intrinsics
8788 // slow path, the arguments have been moved to the right place, so here we are
8789 // guaranteed that the receiver is the first register of the calling convention.
8790 InvokeDexCallingConventionARMVIXL calling_convention;
8791 vixl32::Register receiver = calling_convention.GetRegisterAt(0);
8792 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Alexandre Rames374ddf32016-11-04 10:40:49 +00008793 {
8794 // Make sure the pc is recorded immediately after the `ldr` instruction.
Artem Serov0fb37192016-12-06 18:13:40 +00008795 ExactAssemblyScope aas(GetVIXLAssembler(),
8796 vixl32::kMaxInstructionSizeInBytes,
8797 CodeBufferCheckScope::kMaximumSize);
Alexandre Rames374ddf32016-11-04 10:40:49 +00008798 // /* HeapReference<Class> */ temp = receiver->klass_
8799 __ ldr(temp, MemOperand(receiver, class_offset));
8800 MaybeRecordImplicitNullCheck(invoke);
8801 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008802 // Instead of simply (possibly) unpoisoning `temp` here, we should
8803 // emit a read barrier for the previous class reference load.
8804 // However this is not required in practice, as this is an
8805 // intermediate/temporary reference and because the current
8806 // concurrent copying collector keeps the from-space memory
8807 // intact/accessible until the end of the marking phase (the
8808  // concurrent copying collector may not do so in the future).
8809 GetAssembler()->MaybeUnpoisonHeapReference(temp);
8810
8811 // temp = temp->GetMethodAt(method_offset);
8812 uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
8813 kArmPointerSize).Int32Value();
8814 GetAssembler()->LoadFromOffset(kLoadWord, temp, temp, method_offset);
8815 // LR = temp->GetEntryPoint();
8816 GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, entry_point);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01008817 {
8818 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
8819 // blx in T32 has only 16bit encoding that's why a stricter check for the scope is used.
8820 ExactAssemblyScope aas(GetVIXLAssembler(),
8821 vixl32::k16BitT32InstructionSizeInBytes,
8822 CodeBufferCheckScope::kExactSize);
8823 // LR();
8824 __ blx(lr);
8825 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
8826 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01008827}
8828
Vladimir Marko6fd16062018-06-26 11:02:04 +01008829CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageIntrinsicPatch(
8830 uint32_t intrinsic_data) {
Andreas Gampe3db70682018-12-26 15:12:03 -08008831 return NewPcRelativePatch(/* dex_file= */ nullptr, intrinsic_data, &boot_image_intrinsic_patches_);
Vladimir Marko6fd16062018-06-26 11:02:04 +01008832}
8833
Vladimir Markob066d432018-01-03 13:14:37 +00008834CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageRelRoPatch(
8835 uint32_t boot_image_offset) {
Andreas Gampe3db70682018-12-26 15:12:03 -08008836 return NewPcRelativePatch(/* dex_file= */ nullptr,
Vladimir Markob066d432018-01-03 13:14:37 +00008837 boot_image_offset,
8838 &boot_image_method_patches_);
8839}
8840
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008841CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageMethodPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01008842 MethodReference target_method) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008843 return NewPcRelativePatch(
8844 target_method.dex_file, target_method.index, &boot_image_method_patches_);
Artem Serovd4cc5b22016-11-04 11:19:09 +00008845}
8846
Vladimir Marko0eb882b2017-05-15 13:39:18 +01008847CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewMethodBssEntryPatch(
8848 MethodReference target_method) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008849 return NewPcRelativePatch(
8850 target_method.dex_file, target_method.index, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01008851}
8852
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008853CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageTypePatch(
Artem Serovd4cc5b22016-11-04 11:19:09 +00008854 const DexFile& dex_file, dex::TypeIndex type_index) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008855 return NewPcRelativePatch(&dex_file, type_index.index_, &boot_image_type_patches_);
Artem Serovd4cc5b22016-11-04 11:19:09 +00008856}
8857
Vladimir Marko1998cd02017-01-13 13:02:58 +00008858CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewTypeBssEntryPatch(
8859 const DexFile& dex_file, dex::TypeIndex type_index) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008860 return NewPcRelativePatch(&dex_file, type_index.index_, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00008861}
8862
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008863CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageStringPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01008864 const DexFile& dex_file, dex::StringIndex string_index) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008865 return NewPcRelativePatch(&dex_file, string_index.index_, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01008866}
8867
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01008868CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewStringBssEntryPatch(
8869 const DexFile& dex_file, dex::StringIndex string_index) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008870 return NewPcRelativePatch(&dex_file, string_index.index_, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01008871}
8872
Artem Serovd4cc5b22016-11-04 11:19:09 +00008873CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008874 const DexFile* dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
Artem Serovd4cc5b22016-11-04 11:19:09 +00008875 patches->emplace_back(dex_file, offset_or_index);
8876 return &patches->back();
8877}
8878
Vladimir Marko966b46f2018-08-03 10:20:19 +00008879void CodeGeneratorARMVIXL::EmitBakerReadBarrierBne(uint32_t custom_data) {
Vladimir Markod887ed82018-08-14 13:52:12 +00008880 DCHECK(!__ AllowMacroInstructions()); // In ExactAssemblyScope.
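  // For JIT the branch targets a slow path shared within the method (created lazily and keyed
  // by custom_data); for AOT we emit a placeholder wide BNE and record a patch so that the
  // linker can retarget it to the shared thunk.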
Vladimir Marko966b46f2018-08-03 10:20:19 +00008881 if (Runtime::Current()->UseJitCompilation()) {
8882 auto it = jit_baker_read_barrier_slow_paths_.FindOrAdd(custom_data);
8883 vixl::aarch32::Label* slow_path_entry = &it->second.label;
8884 __ b(ne, EncodingSize(Wide), slow_path_entry);
8885 } else {
8886 baker_read_barrier_patches_.emplace_back(custom_data);
8887 vixl::aarch32::Label* patch_label = &baker_read_barrier_patches_.back().label;
8888 __ bind(patch_label);
8889 vixl32::Label placeholder_label;
8890 __ b(ne, EncodingSize(Wide), &placeholder_label); // Placeholder, patched at link-time.
8891 __ bind(&placeholder_label);
8892 }
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008893}
8894
Artem Serovc5fcb442016-12-02 19:19:58 +00008895VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateBootImageAddressLiteral(uint32_t address) {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01008896 return DeduplicateUint32Literal(address, &uint32_literals_);
Artem Serovc5fcb442016-12-02 19:19:58 +00008897}
8898
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00008899VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitStringLiteral(
8900 const DexFile& dex_file,
8901 dex::StringIndex string_index,
8902 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01008903 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Artem Serovc5fcb442016-12-02 19:19:58 +00008904 return jit_string_patches_.GetOrCreate(
8905 StringReference(&dex_file, string_index),
8906 [this]() {
Andreas Gampe3db70682018-12-26 15:12:03 -08008907 return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u);
Artem Serovc5fcb442016-12-02 19:19:58 +00008908 });
8909}
8910
8911VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitClassLiteral(const DexFile& dex_file,
8912 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00008913 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01008914 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Artem Serovc5fcb442016-12-02 19:19:58 +00008915 return jit_class_patches_.GetOrCreate(
8916 TypeReference(&dex_file, type_index),
8917 [this]() {
Andreas Gampe3db70682018-12-26 15:12:03 -08008918 return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u);
Artem Serovc5fcb442016-12-02 19:19:58 +00008919 });
8920}
8921
Vladimir Marko6fd16062018-06-26 11:02:04 +01008922void CodeGeneratorARMVIXL::LoadBootImageAddress(vixl32::Register reg,
8923 uint32_t boot_image_reference) {
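  // Three cases: when compiling the boot image itself, emit a pc-relative patch resolved at
  // link time; for PIC AOT code compiled against the boot image, load the address through a
  // .data.bimg.rel.ro entry; otherwise (JIT) the boot image is already mapped, so embed the
  // absolute address as a literal.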
8924 if (GetCompilerOptions().IsBootImage()) {
8925 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
8926 NewBootImageIntrinsicPatch(boot_image_reference);
8927 EmitMovwMovtPlaceholder(labels, reg);
Vladimir Markoa2da9b92018-10-10 14:21:55 +01008928 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Marko6fd16062018-06-26 11:02:04 +01008929 CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
8930 NewBootImageRelRoPatch(boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01008931 EmitMovwMovtPlaceholder(labels, reg);
Andreas Gampe3db70682018-12-26 15:12:03 -08008932 __ Ldr(reg, MemOperand(reg, /* offset= */ 0));
Vladimir Markoeebb8212018-06-05 14:57:24 +01008933 } else {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01008934 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markoeebb8212018-06-05 14:57:24 +01008935 gc::Heap* heap = Runtime::Current()->GetHeap();
8936 DCHECK(!heap->GetBootImageSpaces().empty());
8937 uintptr_t address =
Vladimir Marko6fd16062018-06-26 11:02:04 +01008938 reinterpret_cast<uintptr_t>(heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01008939 __ Ldr(reg, DeduplicateBootImageAddressLiteral(dchecked_integral_cast<uint32_t>(address)));
8940 }
8941}
8942
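// Allocate an instance for an intrinsic (e.g. Integer.valueOf()): load the class to
// allocate into the first runtime calling convention register, either via a boot image
// type patch (when compiling the boot image) or via LoadBootImageAddress(), and then
// call the object allocation entrypoint.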
void CodeGeneratorARMVIXL::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
                                                        uint32_t boot_image_offset) {
  DCHECK(invoke->IsStatic());
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  vixl32::Register argument = calling_convention.GetRegisterAt(0);
  if (GetCompilerOptions().IsBootImage()) {
    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
    MethodReference target_method = invoke->GetTargetMethod();
    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
    PcRelativePatchInfo* labels = NewBootImageTypePatch(*target_method.dex_file, type_idx);
    EmitMovwMovtPlaceholder(labels, argument);
  } else {
    LoadBootImageAddress(argument, boot_image_offset);
  }
  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}

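// Convert the recorded PC-relative patch infos into linker patches. Each MOVW/MOVT pair
// produces two patches that share the location of the ADD-PC instruction as their anchor.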
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorARMVIXL::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile* dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.add_pc_label.IsBound());
    uint32_t add_pc_offset = dchecked_integral_cast<uint32_t>(info.add_pc_label.GetLocation());
    // Add MOVW patch.
    DCHECK(info.movw_label.IsBound());
    uint32_t movw_offset = dchecked_integral_cast<uint32_t>(info.movw_label.GetLocation());
    linker_patches->push_back(Factory(movw_offset, dex_file, add_pc_offset, offset_or_index));
    // Add MOVT patch.
    DCHECK(info.movt_label.IsBound());
    uint32_t movt_offset = dchecked_integral_cast<uint32_t>(info.movt_label.GetLocation());
    linker_patches->push_back(Factory(movt_offset, dex_file, add_pc_offset, offset_or_index));
  }
}

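// Adapter for linker patch factories that do not take a dex file; it forwards to the
// three-argument factory and expects the dex file pointer to be null.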
template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}

void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      /* MOVW+MOVT for each entry */ 2u * boot_image_method_patches_.size() +
      /* MOVW+MOVT for each entry */ 2u * method_bss_entry_patches_.size() +
      /* MOVW+MOVT for each entry */ 2u * boot_image_type_patches_.size() +
      /* MOVW+MOVT for each entry */ 2u * type_bss_entry_patches_.size() +
      /* MOVW+MOVT for each entry */ 2u * boot_image_string_patches_.size() +
      /* MOVW+MOVT for each entry */ 2u * string_bss_entry_patches_.size() +
      /* MOVW+MOVT for each entry */ 2u * boot_image_intrinsic_patches_.size() +
      baker_read_barrier_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_intrinsic_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
    DCHECK(boot_image_intrinsic_patches_.empty());
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
    linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
        info.label.GetLocation(), info.custom_data));
  }
  DCHECK_EQ(size, linker_patches->size());
}

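// Thunks are generated for two kinds of patches: relative method calls, where the thunk
// dispatches through the ArtMethod's quick entrypoint, and Baker read barrier branches,
// where the thunk body is built by CompileBakerReadBarrierThunk().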
bool CodeGeneratorARMVIXL::NeedsThunkCode(const linker::LinkerPatch& patch) const {
  return patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
         patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
}

void CodeGeneratorARMVIXL::EmitThunkCode(const linker::LinkerPatch& patch,
                                         /*out*/ ArenaVector<uint8_t>* code,
                                         /*out*/ std::string* debug_name) {
  arm::ArmVIXLAssembler assembler(GetGraph()->GetAllocator());
  switch (patch.GetType()) {
    case linker::LinkerPatch::Type::kCallRelative:
      // The thunk just uses the entry point in the ArtMethod. This works even for calls
      // to the generic JNI and interpreter trampolines.
      assembler.LoadFromOffset(
          arm::kLoadWord,
          vixl32::pc,
          vixl32::r0,
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value());
      assembler.GetVIXLAssembler()->Bkpt(0);
      if (GetCompilerOptions().GenerateAnyDebugInfo()) {
        *debug_name = "MethodCallThunk";
      }
      break;
    case linker::LinkerPatch::Type::kBakerReadBarrierBranch:
      DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
      CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
      break;
    default:
      LOG(FATAL) << "Unexpected patch type " << patch.GetType();
      UNREACHABLE();
  }

  // Ensure we emit the literal pool if any.
  assembler.FinalizeCode();
  code->resize(assembler.CodeSize());
  MemoryRegion code_region(code->data(), code->size());
  assembler.FinalizeInstructions(code_region);
}

VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateUint32Literal(
    uint32_t value,
    Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() {
        return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ value);
      });
}

void LocationsBuilderARMVIXL::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
  locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                     Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  vixl32::Register res = OutputRegister(instr);
  vixl32::Register accumulator =
      InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
  vixl32::Register mul_left =
      InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  vixl32::Register mul_right =
      InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  if (instr->GetOpKind() == HInstruction::kAdd) {
    __ Mla(res, mul_left, mul_right, accumulator);
  } else {
    __ Mls(res, mul_left, mul_right, accumulator);
  }
}

void LocationsBuilderARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARMVIXL::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (switch_instr->GetNumEntries() > kPackedSwitchCompareJumpThreshold &&
      codegen_->GetAssembler()->GetVIXLAssembler()->IsUsingT32()) {
    locations->AddTemp(Location::RequiresRegister());  // We need a temp for the table base.
    if (switch_instr->GetStartValue() != 0) {
      locations->AddTemp(Location::RequiresRegister());  // We need a temp for the bias.
    }
  }
}

// TODO(VIXL): Investigate and reach the parity with old arm codegen.
void InstructionCodeGeneratorARMVIXL::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  vixl32::Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      !codegen_->GetAssembler()->GetVIXLAssembler()->IsUsingT32()) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(GetVIXLAssembler());
    vixl32::Register temp_reg = temps.Acquire();
    // Note: It is fine for the AddConstantSetFlags() below to use the IP register to temporarily
    // store the immediate, because IP is used as the destination register. For the other
    // AddConstantSetFlags() and GenerateCompareWithImmediate(), the immediate values are constant,
    // and they can be encoded in the instruction without making use of the IP register.
    __ Adds(temp_reg, value_reg, -lower_bound);

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Adds(temp_reg, temp_reg, -2);
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp_reg, 1);
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    // Create a table lookup.
    vixl32::Register table_base = RegisterFrom(locations->GetTemp(0));

    JumpTableARMVIXL* jump_table = codegen_->CreateJumpTable(switch_instr);

    // Remove the bias.
    vixl32::Register key_reg;
    if (lower_bound != 0) {
      key_reg = RegisterFrom(locations->GetTemp(1));
      __ Sub(key_reg, value_reg, lower_bound);
    } else {
      key_reg = value_reg;
    }

    // Check whether the value is in the table, jump to default block if not.
    __ Cmp(key_reg, num_entries - 1);
    __ B(hi, codegen_->GetLabelOf(default_block));

    UseScratchRegisterScope temps(GetVIXLAssembler());
    vixl32::Register jump_offset = temps.Acquire();

    // Load jump offset from the table.
    {
      const size_t jump_size = switch_instr->GetNumEntries() * sizeof(int32_t);
      ExactAssemblyScope aas(GetVIXLAssembler(),
                             (vixl32::kMaxInstructionSizeInBytes * 4) + jump_size,
                             CodeBufferCheckScope::kMaximumSize);
      __ adr(table_base, jump_table->GetTableStartLabel());
      __ ldr(jump_offset, MemOperand(table_base, key_reg, vixl32::LSL, 2));

      // Jump to the target block by branching to table_base (PC-relative) + offset.
      vixl32::Register target_address = table_base;
      __ add(target_address, table_base, jump_offset);
      __ bx(target_address);

      jump_table->EmitTable(codegen_);
    }
  }
}

// Copy the result of a call into the given target.
void CodeGeneratorARMVIXL::MoveFromReturnRegister(Location trg, DataType::Type type) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, DataType::Type::kVoid);
    return;
  }

  DCHECK_NE(type, DataType::Type::kVoid);

  Location return_loc = InvokeDexCallingConventionVisitorARMVIXL().GetReturnLocation(type);
  if (return_loc.Equals(trg)) {
    return;
  }

  // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
  // with the last branch.
  if (type == DataType::Type::kInt64) {
    TODO_VIXL32(FATAL);
  } else if (type == DataType::Type::kFloat64) {
    TODO_VIXL32(FATAL);
  } else {
    // Let the parallel move resolver take care of all of this.
    HParallelMove parallel_move(GetGraph()->GetAllocator());
    parallel_move.AddMove(return_loc, trg, type, nullptr);
    GetMoveResolver()->EmitNativeCode(&parallel_move);
  }
}

void LocationsBuilderARMVIXL::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARMVIXL::VisitClassTableGet(HClassTableGet* instruction) {
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArmPointerSize).SizeValue();
    GetAssembler()->LoadFromOffset(kLoadWord,
                                   OutputRegister(instruction),
                                   InputRegisterAt(instruction, 0),
                                   method_offset);
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArmPointerSize));
    GetAssembler()->LoadFromOffset(kLoadWord,
                                   OutputRegister(instruction),
                                   InputRegisterAt(instruction, 0),
                                   mirror::Class::ImtPtrOffset(kArmPointerSize).Uint32Value());
    GetAssembler()->LoadFromOffset(kLoadWord,
                                   OutputRegister(instruction),
                                   OutputRegister(instruction),
                                   method_offset);
  }
}

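// Patch a bound literal in JIT-compiled code so that it holds the address of the root's
// slot in the JIT GC roots table (`roots_data` plus the table index).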
static void PatchJitRootUse(uint8_t* code,
                            const uint8_t* roots_data,
                            VIXLUInt32Literal* literal,
                            uint64_t index_in_table) {
  DCHECK(literal->IsBound());
  uint32_t literal_offset = literal->GetLocation();
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint8_t* data = code + literal_offset;
  reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
}

void CodeGeneratorARMVIXL::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const StringReference& string_reference = entry.first;
    VIXLUInt32Literal* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
  for (const auto& entry : jit_class_patches_) {
    const TypeReference& type_reference = entry.first;
    VIXLUInt32Literal* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
}

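// Emit the MOVW+MOVT+ADD(pc) sequence with zero immediates and bind the associated patch
// labels; the actual offsets are filled in later when the recorded patches are applied.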
void CodeGeneratorARMVIXL::EmitMovwMovtPlaceholder(
    CodeGeneratorARMVIXL::PcRelativePatchInfo* labels,
    vixl32::Register out) {
  ExactAssemblyScope aas(GetVIXLAssembler(),
                         3 * vixl32::kMaxInstructionSizeInBytes,
                         CodeBufferCheckScope::kMaximumSize);
  // TODO(VIXL): Think about using mov instead of movw.
  __ bind(&labels->movw_label);
  __ movw(out, /* operand= */ 0u);
  __ bind(&labels->movt_label);
  __ movt(out, /* operand= */ 0u);
  __ bind(&labels->add_pc_label);
  __ add(out, out, pc);
}

#undef __
#undef QUICK_ENTRY_POINT
#undef TODO_VIXL32

#define __ assembler.GetVIXLAssembler()->

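// Emit the fast path of a Baker read barrier thunk: test the read barrier state bit in the
// lock word and, if the object is not gray, return to the compiled code after the original
// LDR with an artificial data dependency on the lock word to prevent load-load reordering.
// If the object is gray, branch to `slow_path`. When `throw_npe` is provided it is bound on
// this return path, so a null holder returns to the original LDR and the implicit null check
// raises the NPE.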
static void EmitGrayCheckAndFastPath(ArmVIXLAssembler& assembler,
                                     vixl32::Register base_reg,
                                     vixl32::MemOperand& lock_word,
                                     vixl32::Label* slow_path,
                                     int32_t raw_ldr_offset,
                                     vixl32::Label* throw_npe = nullptr) {
  // Load the lock word containing the rb_state.
  __ Ldr(ip, lock_word);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  __ Tst(ip, Operand(LockWord::kReadBarrierStateMaskShifted));
  __ B(ne, slow_path, /* is_far_target= */ false);
  // To throw NPE, we return to the fast path; the artificial dependence below does not matter.
  if (throw_npe != nullptr) {
    __ Bind(throw_npe);
  }
  __ Add(lr, lr, raw_ldr_offset);
  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  __ Add(base_reg, base_reg, Operand(ip, LSR, 32));
  __ Bx(lr);  // And return back to the function.
  // Note: The fake dependency is unnecessary for the slow path.
}

// Load the read barrier introspection entrypoint into the marking register and return it.
static vixl32::Register LoadReadBarrierMarkIntrospectionEntrypoint(ArmVIXLAssembler& assembler) {
  // The register where the read barrier introspection entrypoint is loaded
  // is the marking register. We clobber it here and the entrypoint restores it to 1.
  vixl32::Register entrypoint = mr;
  // entrypoint = Thread::Current()->pReadBarrierMarkReg12, i.e. pReadBarrierMarkIntrospection.
  DCHECK_EQ(ip.GetCode(), 12u);
  const int32_t entry_point_offset =
      Thread::ReadBarrierMarkEntryPointsOffset<kArmPointerSize>(ip.GetCode());
  __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
  return entrypoint;
}

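// Compile a Baker read barrier thunk for the given `encoded_data`. The encoding selects one
// of three shapes: a field load (narrow or wide LDR with a holder register), an array load
// (LDR indexed by a register), or a GC root / UnsafeCAS load that checks the mark bit and
// forwarding address instead of the gray bit.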
void CodeGeneratorARMVIXL::CompileBakerReadBarrierThunk(ArmVIXLAssembler& assembler,
                                                        uint32_t encoded_data,
                                                        /*out*/ std::string* debug_name) {
  BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
  switch (kind) {
    case BakerReadBarrierKind::kField: {
      vixl32::Register base_reg(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      vixl32::Register holder_reg(BakerReadBarrierSecondRegField::Decode(encoded_data));
      CheckValidReg(holder_reg.GetCode());
      BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip);
      // If base_reg differs from holder_reg, the offset was too large and we must have emitted
      // an explicit null check before the load. Otherwise, for implicit null checks, we need to
      // null-check the holder as we do not necessarily do that check before going to the thunk.
      vixl32::Label throw_npe_label;
      vixl32::Label* throw_npe = nullptr;
      if (GetCompilerOptions().GetImplicitNullChecks() && holder_reg.Is(base_reg)) {
        throw_npe = &throw_npe_label;
        __ CompareAndBranchIfZero(holder_reg, throw_npe, /* is_far_target= */ false);
      }
      // Check if the holder is gray and, if not, add fake dependency to the base register
      // and return to the LDR instruction to load the reference. Otherwise, use introspection
      // to load the reference and call the entrypoint that performs further checks on the
      // reference and marks it if needed.
      vixl32::Label slow_path;
      MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
      const int32_t raw_ldr_offset = (width == BakerReadBarrierWidth::kWide)
          ? BAKER_MARK_INTROSPECTION_FIELD_LDR_WIDE_OFFSET
          : BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_OFFSET;
      EmitGrayCheckAndFastPath(
          assembler, base_reg, lock_word, &slow_path, raw_ldr_offset, throw_npe);
      __ Bind(&slow_path);
      const int32_t ldr_offset = /* Thumb state adjustment (LR contains Thumb state). */ -1 +
                                 raw_ldr_offset;
      vixl32::Register ep_reg = LoadReadBarrierMarkIntrospectionEntrypoint(assembler);
      if (width == BakerReadBarrierWidth::kWide) {
        MemOperand ldr_half_address(lr, ldr_offset + 2);
        __ Ldrh(ip, ldr_half_address);        // Load the LDR immediate half-word with "Rt | imm12".
        __ Ubfx(ip, ip, 0, 12);               // Extract the offset imm12.
        __ Ldr(ip, MemOperand(base_reg, ip));  // Load the reference.
      } else {
        MemOperand ldr_address(lr, ldr_offset);
        __ Ldrh(ip, ldr_address);             // Load the LDR immediate, encoding T1.
        __ Add(ep_reg,                        // Adjust the entrypoint address to the entrypoint
               ep_reg,                        // for narrow LDR.
               Operand(BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_ENTRYPOINT_OFFSET));
        __ Ubfx(ip, ip, 6, 5);                // Extract the imm5, i.e. offset / 4.
        __ Ldr(ip, MemOperand(base_reg, ip, LSL, 2));  // Load the reference.
      }
      // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
      __ Bx(ep_reg);                          // Jump to the entrypoint.
      break;
    }
    case BakerReadBarrierKind::kArray: {
      vixl32::Register base_reg(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      DCHECK(BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide);
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip);
      vixl32::Label slow_path;
      int32_t data_offset =
          mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
      MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
      DCHECK_LT(lock_word.GetOffsetImmediate(), 0);
      const int32_t raw_ldr_offset = BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET;
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, raw_ldr_offset);
      __ Bind(&slow_path);
      const int32_t ldr_offset = /* Thumb state adjustment (LR contains Thumb state). */ -1 +
                                 raw_ldr_offset;
      MemOperand ldr_address(lr, ldr_offset + 2);
      __ Ldrb(ip, ldr_address);               // Load the LDR (register) byte with "00 | imm2 | Rm",
                                              // i.e. Rm+32 because the scale in imm2 is 2.
      vixl32::Register ep_reg = LoadReadBarrierMarkIntrospectionEntrypoint(assembler);
      __ Bfi(ep_reg, ip, 3, 6);               // Insert ip to the entrypoint address to create
                                              // a switch case target based on the index register.
      __ Mov(ip, base_reg);                   // Move the base register to ip.
      __ Bx(ep_reg);                          // Jump to the entrypoint's array switch case.
      break;
    }
    case BakerReadBarrierKind::kGcRoot:
    case BakerReadBarrierKind::kUnsafeCas: {
      // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
      // and it does not have a forwarding address), call the correct introspection entrypoint;
      // otherwise return the reference (or the extracted forwarding address).
      // There is no gray bit check for GC roots.
      vixl32::Register root_reg(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(root_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip);
      vixl32::Label return_label, not_marked, forwarding_address;
      __ CompareAndBranchIfZero(root_reg, &return_label, /* is_far_target= */ false);
      MemOperand lock_word(root_reg, mirror::Object::MonitorOffset().Int32Value());
      __ Ldr(ip, lock_word);
      __ Tst(ip, LockWord::kMarkBitStateMaskShifted);
      __ B(eq, &not_marked);
      __ Bind(&return_label);
      __ Bx(lr);
      __ Bind(&not_marked);
      static_assert(LockWord::kStateShift == 30 && LockWord::kStateForwardingAddress == 3,
                    "To use 'CMP ip, #modified-immediate; BHS', we need the lock word state in"
                    " the highest bits and the 'forwarding address' state to have all bits set");
      __ Cmp(ip, Operand(0xc0000000));
      __ B(hs, &forwarding_address);
      vixl32::Register ep_reg = LoadReadBarrierMarkIntrospectionEntrypoint(assembler);
      // Adjust the art_quick_read_barrier_mark_introspection address in kBakerCcEntrypointRegister
      // to one of art_quick_read_barrier_mark_introspection_{gc_roots_{wide,narrow},unsafe_cas}.
      DCHECK(kind != BakerReadBarrierKind::kUnsafeCas || width == BakerReadBarrierWidth::kWide);
      int32_t entrypoint_offset =
          (kind == BakerReadBarrierKind::kGcRoot)
              ? (width == BakerReadBarrierWidth::kWide)
                  ? BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_WIDE_ENTRYPOINT_OFFSET
                  : BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_NARROW_ENTRYPOINT_OFFSET
              : BAKER_MARK_INTROSPECTION_UNSAFE_CAS_ENTRYPOINT_OFFSET;
      __ Add(ep_reg, ep_reg, Operand(entrypoint_offset));
      __ Mov(ip, root_reg);
      __ Bx(ep_reg);
      __ Bind(&forwarding_address);
      __ Lsl(root_reg, ip, LockWord::kForwardingAddressShift);
      __ Bx(lr);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
      UNREACHABLE();
  }

  // For JIT, the slow path is considered part of the compiled method,
  // so JIT should pass null as `debug_name`. Tests may not have a runtime.
  DCHECK(Runtime::Current() == nullptr ||
         !Runtime::Current()->UseJitCompilation() ||
         debug_name == nullptr);
  if (debug_name != nullptr && GetCompilerOptions().GenerateAnyDebugInfo()) {
    std::ostringstream oss;
    oss << "BakerReadBarrierThunk";
    switch (kind) {
      case BakerReadBarrierKind::kField:
        oss << "Field";
        if (BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide) {
          oss << "Wide";
        }
        oss << "_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kArray:
        oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        DCHECK(BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide);
        break;
      case BakerReadBarrierKind::kGcRoot:
        oss << "GcRoot";
        if (BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide) {
          oss << "Wide";
        }
        oss << "_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
      case BakerReadBarrierKind::kUnsafeCas:
        oss << "UnsafeCas_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        DCHECK(BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide);
        break;
    }
    *debug_name = oss.str();
  }
}

#undef __

}  // namespace arm
}  // namespace art