/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm_vixl.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "common_arm.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/arm/assembler_arm_vixl.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {
namespace arm {

namespace vixl32 = vixl::aarch32;
using namespace vixl32;  // NOLINT(build/namespaces)

using helpers::DRegisterFrom;
using helpers::DWARFReg;
using helpers::HighDRegisterFrom;
using helpers::HighRegisterFrom;
using helpers::InputOperandAt;
using helpers::InputRegister;
using helpers::InputRegisterAt;
using helpers::InputSRegisterAt;
using helpers::InputVRegisterAt;
using helpers::LocationFrom;
using helpers::LowRegisterFrom;
using helpers::LowSRegisterFrom;
using helpers::OutputRegister;
using helpers::OutputSRegister;
using helpers::OutputVRegister;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;

using RegisterList = vixl32::RegisterList;

static bool ExpectedPairLayout(Location location) {
  // We expect this for both core and fpu register pairs.
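  // For example, a long in (R4, R5) or a double in (S4, S5) (i.e. D2) satisfies this; an
  // odd-numbered or non-adjacent pair does not.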
  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
}

static constexpr size_t kArmInstrMaxSizeInBytes = 4u;

#ifdef __
#error "ARM Codegen VIXL macro-assembler macro already defined."
#endif

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, x).Int32Value()

// Marker for code that is yet to be, and must be, implemented.
#define TODO_VIXL32(level) LOG(level) << __PRETTY_FUNCTION__ << " unimplemented "

// SaveLiveRegisters and RestoreLiveRegisters from SlowPathCodeARM operate on sets of S registers:
// for each live D register they treat the two corresponding S registers as live ones.
//
// The two following functions (SaveContiguousSRegisterList, RestoreContiguousSRegisterList) build
// from a list of contiguous S registers a list of contiguous D registers (handling the first/last
// S register corner cases) and save/restore this new list treating them as D registers, thus:
// - decreasing code size
// - avoiding hazards on Cortex-A57, when a pair of S registers for an actual live D register is
//   restored and then used in regular non slow path code as a D register.
//
// For the following example (v means the S register is live):
//   D names: |    D0   |    D1   |    D2   |    D3   | ...
//   S names: | S0 | S1 | S2 | S3 | S4 | S5 | S6 | S7 | ...
//   Live?    |    |  v |  v |  v |  v |  v |  v |    | ...
//
// S1 and S6 will be saved/restored independently; the D register list (D1, D2) will be processed
// as D registers.
//
// TODO(VIXL): All this code should be unnecessary once the VIXL AArch32 backend provides helpers
// for lists of floating-point registers.
static size_t SaveContiguousSRegisterList(size_t first,
                                          size_t last,
                                          CodeGenerator* codegen,
                                          size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool save_last = false;
  if (last % 2 == 0) {
    save_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;

    if (number_of_d_regs == 1) {
      __ Vstr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, stack_offset);
      }
      __ Vstm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (save_last) {
    __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

static size_t RestoreContiguousSRegisterList(size_t first,
                                             size_t last,
                                             CodeGenerator* codegen,
                                             size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vldr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vldr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool restore_last = false;
  if (last % 2 == 0) {
    restore_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;
    if (number_of_d_regs == 1) {
      __ Vldr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, stack_offset);
      }
      __ Vldm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (restore_last) {
    __ Vldr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

void SlowPathCodeARMVIXL::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  orig_offset = stack_offset;
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  stack_offset = orig_offset;
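  // Walk the FP spill mask one contiguous run of set bits at a time. Adding 1 << begin carries
  // through the run, so ANDing with the sum clears exactly that run, and CTZ of the sum gives the
  // first bit past it. For example, with fp_spills = 0b0110'1100: begin = 2, tmp = 0b0111'0000,
  // so S2-S3 are handled first and the next iteration picks up S5-S6.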
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = SaveContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

void SlowPathCodeARMVIXL::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    stack_offset += kArmWordSize;
  }

  // TODO(VIXL): Check the coherency of stack_offset after this with a test.
  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = RestoreContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

class NullCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit NullCheckSlowPathARMVIXL(HNullCheck* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARMVIXL);
};

class DivZeroCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DivZeroCheckSlowPathARMVIXL(HDivZeroCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARMVIXL);
};

class SuspendCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  SuspendCheckSlowPathARMVIXL(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARMVIXL(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm_codegen->GetLabelOf(successor_));
    }
  }

  vixl32::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARMVIXL"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl32::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARMVIXL);
};

class BoundsCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit BoundsCheckSlowPathARMVIXL(HBoundsCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARMVIXL);
};

class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  LoadClassSlowPathARMVIXL(HLoadClass* cls, HInstruction* at, uint32_t dex_pc, bool do_clinit)
      : SlowPathCodeARMVIXL(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARMVIXL"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARMVIXL);
};

class DeoptimizationSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DeoptimizationSlowPathARMVIXL(HDeoptimize* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARMVIXL);
};

class ArraySetSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit ArraySetSlowPathARMVIXL(HInstruction* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    arm_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARMVIXL);
};

inline vixl32::Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition.
inline vixl32::Condition ARMUnsignedCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    // Signed to unsigned.
    case kCondLT: return lo;
    case kCondLE: return ls;
    case kCondGT: return hi;
    case kCondGE: return hs;
    // Unsigned remain unchanged.
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline vixl32::Condition ARMFPCondition(IfCondition cond, bool gt_bias) {
  // The ARM condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table A8-1 of the ARMv7 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

void CodeGeneratorARMVIXL::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << vixl32::Register(reg);
}

void CodeGeneratorARMVIXL::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << vixl32::SRegister(reg);
}

static uint32_t ComputeSRegisterListMask(const SRegisterList& regs) {
  uint32_t mask = 0;
  for (uint32_t i = regs.GetFirstSRegister().GetCode();
       i <= regs.GetLastSRegister().GetCode();
       ++i) {
    mask |= (1 << i);
  }
  return mask;
}

#undef __

CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
                                           const ArmInstructionSetFeatures& isa_features,
                                           const CompilerOptions& compiler_options,
                                           OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    kCoreCalleeSaves.GetList(),
                    ComputeSRegisterListMask(kFpuCalleeSaves),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features) {
  // Always save the LR register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(LR));
  // Give d14 and d15 as scratch registers to VIXL.
  // They are removed from the register allocator in `SetupBlockedRegisters()`.
  // TODO(VIXL): We need two scratch D registers for `EmitSwap` when swapping two double stack
  // slots. If that is sufficiently rare, and we have pressure on FP registers, we could instead
  // spill in `EmitSwap`. But if we actually are guaranteed to have 32 D registers, we could give
  // d30 and d31 to VIXL to avoid removing registers from the allocator. If that is the case, we
  // may also want to investigate giving those 14 other D registers to the allocator.
  GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d14);
  GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d15);
}

#define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()->

void CodeGeneratorARMVIXL::Finalize(CodeAllocator* allocator) {
  GetAssembler()->FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void CodeGeneratorARMVIXL::SetupBlockedRegisters() const {
  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  // Registers s28-s31 (d14-d15) are left to VIXL for scratch registers.
  // (They are given to the `MacroAssembler` in `CodeGeneratorARMVIXL::CodeGeneratorARMVIXL`.)
  blocked_fpu_registers_[28] = true;
  blocked_fpu_registers_[29] = true;
  blocked_fpu_registers_[30] = true;
  blocked_fpu_registers_[31] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (uint32_t i = kFpuCalleeSaves.GetFirstSRegister().GetCode();
         i <= kFpuCalleeSaves.GetLastSRegister().GetCode();
         ++i) {
      blocked_fpu_registers_[i] = true;
    }
  }
}

InstructionCodeGeneratorARMVIXL::InstructionCodeGeneratorARMVIXL(HGraph* graph,
                                                                 CodeGeneratorARMVIXL* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void CodeGeneratorARMVIXL::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  // There is no easy instruction to restore just the PC on thumb2. We spill and
  // restore another arbitrary register.
  core_spill_mask_ |= (1 << kCoreAlwaysSpillRegister.GetCode());
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
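  // For example, if only S16 and S19 were allocated among the callee saves, the mask is widened
  // to cover S16-S19 so that a single vpush/vpop handles the whole contiguous range.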
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}

void CodeGeneratorARMVIXL::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    vixl32::Register temp = temps.Acquire();
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    // The load must immediately precede RecordPcInfo.
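    // If the guard page below the reserved stack region has been reached, this load faults, and
    // the fault handler uses the pc recorded here to report an implicit StackOverflowError.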
    AssemblerAccurateScope aas(GetVIXLAssembler(),
                               kArmInstrMaxSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
    __ ldr(temp, MemOperand(temp));
    RecordPcInfo(nullptr, 0);
  }

  __ Push(RegisterList(core_spill_mask_));
  GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(core_spill_mask_));
  GetAssembler()->cfi().RelOffsetForMany(DWARFReg(kMethodRegister),
                                         0,
                                         core_spill_mask_,
                                         kArmWordSize);
  if (fpu_spill_mask_ != 0) {
    uint32_t first = LeastSignificantBit(fpu_spill_mask_);

    // Check that list is contiguous.
    DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));

    __ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
    GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
    GetAssembler()->cfi().RelOffsetForMany(DWARFReg(s0), 0, fpu_spill_mask_, kArmWordSize);
  }
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ Sub(sp, sp, adjust);
  GetAssembler()->cfi().AdjustCFAOffset(adjust);
  GetAssembler()->StoreToOffset(kStoreWord, kMethodRegister, sp, 0);
}

void CodeGeneratorARMVIXL::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    __ Bx(lr);
    return;
  }
  GetAssembler()->cfi().RememberState();
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ Add(sp, sp, adjust);
  GetAssembler()->cfi().AdjustCFAOffset(-adjust);
  if (fpu_spill_mask_ != 0) {
    uint32_t first = LeastSignificantBit(fpu_spill_mask_);

    // Check that list is contiguous.
    DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));

    __ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
    GetAssembler()->cfi().AdjustCFAOffset(
        -static_cast<int>(kArmWordSize) * POPCOUNT(fpu_spill_mask_));
    GetAssembler()->cfi().RestoreMany(DWARFReg(vixl32::SRegister(0)), fpu_spill_mask_);
  }
  // Pop LR into PC to return.
  DCHECK_NE(core_spill_mask_ & (1 << kLrCode), 0U);
  uint32_t pop_mask = (core_spill_mask_ & (~(1 << kLrCode))) | 1 << kPcCode;
  __ Pop(RegisterList(pop_mask));
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorARMVIXL::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARMVIXL::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(RegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadFromOffset(kLoadWord,
                                     RegisterFrom(destination),
                                     sp,
                                     source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      GetAssembler()->StoreToOffset(kStoreWord,
                                    RegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      vixl32::Register temp = temps.Acquire();
      GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
      GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
    }
  }
}

void CodeGeneratorARMVIXL::MoveConstant(Location destination ATTRIBUTE_UNUSED,
                                        int32_t value ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void CodeGeneratorARMVIXL::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
  // TODO(VIXL): Maybe refactor to have the 'move' implementation here and use it in
  // `ParallelMoveResolverARMVIXL::EmitMove`, as is done in the `arm64` backend.
  HParallelMove move(GetGraph()->GetArena());
  move.AddMove(src, dst, dst_type, nullptr);
  GetMoveResolver()->EmitNativeCode(&move);
}

void CodeGeneratorARMVIXL::AddLocationAsTemp(Location location ATTRIBUTE_UNUSED,
                                             LocationSummary* locations ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                         HInstruction* instruction,
                                         uint32_t dex_pc,
                                         SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kArmPointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
    // previous instruction.
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void CodeGeneratorARMVIXL::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                               HInstruction* instruction,
                                                               SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}

void CodeGeneratorARMVIXL::GenerateInvokeRuntime(int32_t entry_point_offset) {
  GetAssembler()->LoadFromOffset(kLoadWord, lr, tr, entry_point_offset);
  __ Blx(lr);
}

void InstructionCodeGeneratorARMVIXL::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARMVIXL::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void LocationsBuilderARMVIXL::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void InstructionCodeGeneratorARMVIXL::GenerateVcmp(HInstruction* instruction) {
  Primitive::Type type = instruction->InputAt(0)->GetType();
  Location lhs_loc = instruction->GetLocations()->InAt(0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // a VCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equal, Float.compare,
    // Float.compareTo, Double.equal, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(rhs_loc.GetConstant()->IsArithmeticZero());
    if (type == Primitive::kPrimFloat) {
      __ Vcmp(F32, InputSRegisterAt(instruction, 0), 0.0);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ Vcmp(F64, DRegisterFrom(lhs_loc), 0.0);
    }
  } else {
    if (type == Primitive::kPrimFloat) {
      __ Vcmp(InputSRegisterAt(instruction, 0), InputSRegisterAt(instruction, 1));
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ Vcmp(DRegisterFrom(lhs_loc), DRegisterFrom(rhs_loc));
    }
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateFPJumps(HCondition* cond,
                                                      vixl32::Label* true_label,
                                                      vixl32::Label* false_label ATTRIBUTE_UNUSED) {
  // To branch on the result of the FP compare we transfer FPSCR to APSR (encoded as PC in VMRS).
  __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
  __ B(ARMFPCondition(cond->GetCondition(), cond->IsGtBias()), true_label);
}

void InstructionCodeGeneratorARMVIXL::GenerateLongComparesAndJumps(HCondition* cond,
                                                                   vixl32::Label* true_label,
                                                                   vixl32::Label* false_label) {
  LocationSummary* locations = cond->GetLocations();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  IfCondition if_cond = cond->GetCondition();

  vixl32::Register left_high = HighRegisterFrom(left);
  vixl32::Register left_low = LowRegisterFrom(left);
  IfCondition true_high_cond = if_cond;
  IfCondition false_high_cond = cond->GetOppositeCondition();
  vixl32::Condition final_condition = ARMUnsignedCondition(if_cond);  // unsigned on lower part

  // Set the conditions for the test, remembering that == needs to be
  // decided using the low words.
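  // For example, for a signed kCondLT the high words decide the result outright when they differ
  // (branch to true_label on LT, to false_label on GT); only when they are equal does the final
  // unsigned comparison of the low words (LO) decide.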
  // TODO: consider avoiding jumps with temporary and CMP low+SBC high
  switch (if_cond) {
    case kCondEQ:
    case kCondNE:
      // Nothing to do.
      break;
    case kCondLT:
      false_high_cond = kCondGT;
      break;
    case kCondLE:
      true_high_cond = kCondLT;
      break;
    case kCondGT:
      false_high_cond = kCondLT;
      break;
    case kCondGE:
      true_high_cond = kCondGT;
      break;
    case kCondB:
      false_high_cond = kCondA;
      break;
    case kCondBE:
      true_high_cond = kCondB;
      break;
    case kCondA:
      false_high_cond = kCondB;
      break;
    case kCondAE:
      true_high_cond = kCondA;
      break;
  }
  if (right.IsConstant()) {
    int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
    int32_t val_low = Low32Bits(value);
    int32_t val_high = High32Bits(value);

    __ Cmp(left_high, val_high);
    if (if_cond == kCondNE) {
      __ B(ARMCondition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ B(ARMCondition(false_high_cond), false_label);
    } else {
      __ B(ARMCondition(true_high_cond), true_label);
      __ B(ARMCondition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ Cmp(left_low, val_low);
  } else {
    vixl32::Register right_high = HighRegisterFrom(right);
    vixl32::Register right_low = LowRegisterFrom(right);

    __ Cmp(left_high, right_high);
    if (if_cond == kCondNE) {
      __ B(ARMCondition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ B(ARMCondition(false_high_cond), false_label);
    } else {
      __ B(ARMCondition(true_high_cond), true_label);
      __ B(ARMCondition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ Cmp(left_low, right_low);
  }
  // The last comparison might be unsigned.
  // TODO: optimize cases where this is always true/false
  __ B(final_condition, true_label);
}

void InstructionCodeGeneratorARMVIXL::GenerateCompareTestAndBranch(HCondition* condition,
                                                                   vixl32::Label* true_target_in,
                                                                   vixl32::Label* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough` instead.
  vixl32::Label fallthrough;
  vixl32::Label* true_target = (true_target_in == nullptr) ? &fallthrough : true_target_in;
  vixl32::Label* false_target = (false_target_in == nullptr) ? &fallthrough : false_target_in;

  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong:
      GenerateLongComparesAndJumps(condition, true_target, false_target);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      GenerateVcmp(condition);
      GenerateFPJumps(condition, true_target, false_target);
      break;
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  if (false_target != &fallthrough) {
    __ B(false_target);
  }

  if (true_target_in == nullptr || false_target_in == nullptr) {
    __ Bind(&fallthrough);
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateTestAndBranch(HInstruction* instruction,
                                                            size_t condition_input_index,
                                                            vixl32::Label* true_target,
                                                            vixl32::Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Condition has been materialized, compare the output to 0.
    if (kIsDebugBuild) {
      Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
      DCHECK(cond_val.IsRegister());
    }
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // Condition has not been materialized. Use its inputs as the comparison and
    // its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    LocationSummary* locations = cond->GetLocations();
    DCHECK(locations->InAt(0).IsRegister());
    vixl32::Register left = InputRegisterAt(cond, 0);
    Location right = locations->InAt(1);
    if (right.IsRegister()) {
      __ Cmp(left, InputRegisterAt(cond, 1));
    } else {
      DCHECK(right.IsConstant());
      __ Cmp(left, CodeGenerator::GetInt32ValueOf(right.GetConstant()));
    }
    if (true_target == nullptr) {
      __ B(ARMCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ B(ARMCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}

void LocationsBuilderARMVIXL::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl32::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  vixl32::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARMVIXL::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARMVIXL* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARMVIXL>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

void LocationsBuilderARMVIXL::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  locations->SetOut(Location::SameAsFirstInput());
}

void InstructionCodeGeneratorARMVIXL::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  vixl32::Label false_target;
  GenerateTestAndBranch(select,
                        /* condition_input_index */ 2,
                        /* true_target */ nullptr,
                        &false_target);
  codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
  __ Bind(&false_target);
}

void CodeGeneratorARMVIXL::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARMVIXL::HandleCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
  // Handle the long/FP comparisons made in instruction simplification.
  switch (cond->InputAt(0)->GetType()) {
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      }
      break;

    // TODO(VIXL): https://android-review.googlesource.com/#/c/252265/
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
      break;

    default:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
  }
}

void InstructionCodeGeneratorARMVIXL::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  vixl32::Register out = OutputRegister(cond);
  vixl32::Label true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default: {
      // Integer case.
      __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
      AssemblerAccurateScope aas(GetVIXLAssembler(),
                                 kArmInstrMaxSizeInBytes * 3u,
                                 CodeBufferCheckScope::kMaximumSize);
      __ ite(ARMCondition(cond->GetCondition()));
      __ mov(ARMCondition(cond->GetCondition()), OutputRegister(cond), 1);
      __ mov(ARMCondition(cond->GetOppositeCondition()), OutputRegister(cond), 0);
      return;
    }
    case Primitive::kPrimLong:
      GenerateLongComparesAndJumps(cond, &true_label, &false_label);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      GenerateVcmp(cond);
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
  }

  // Convert the jumps into the result.
  vixl32::Label done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ Mov(out, 0);
  __ B(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ Mov(out, 1);
  __ Bind(&done_label);
}

void LocationsBuilderARMVIXL::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitFloatConstant(
    HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARMVIXL::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARMVIXL::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARMVIXL::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

Scott Wakelinga7812ae2016-10-17 10:03:36 +01001413void LocationsBuilderARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1414 // Explicit clinit checks triggered by static invokes must have been pruned by
1415 // art::PrepareForRegisterAllocation.
1416 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
1417
1418 // TODO(VIXL): TryDispatch
1419
1420 HandleInvoke(invoke);
1421}
1422
1423void InstructionCodeGeneratorARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1424 // Explicit clinit checks triggered by static invokes must have been pruned by
1425 // art::PrepareForRegisterAllocation.
1426 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
1427
1428 // TODO(VIXL): TryGenerateIntrinsicCode
1429
1430 LocationSummary* locations = invoke->GetLocations();
1431 DCHECK(locations->HasTemps());
1432 codegen_->GenerateStaticOrDirectCall(invoke, locations->GetTemp(0));
1433 // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
1434 // previous instruction.
1435 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1436}
1437
1438void LocationsBuilderARMVIXL::HandleInvoke(HInvoke* invoke) {
1439 InvokeDexCallingConventionVisitorARM calling_convention_visitor;
1440 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
1441}
1442
1443void LocationsBuilderARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1444 // TODO(VIXL): TryDispatch
1445
1446 HandleInvoke(invoke);
1447}
1448
1449void InstructionCodeGeneratorARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1450 // TODO(VIXL): TryGenerateIntrinsicCode
1451
1452 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
1453 DCHECK(!codegen_->IsLeafMethod());
1454 // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
1455 // previous instruction.
1456 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1457}
1458
Artem Serov02109dd2016-09-23 17:17:54 +01001459void LocationsBuilderARMVIXL::VisitNeg(HNeg* neg) {
1460 LocationSummary* locations =
1461 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1462 switch (neg->GetResultType()) {
1463 case Primitive::kPrimInt: {
1464 locations->SetInAt(0, Location::RequiresRegister());
1465 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1466 break;
1467 }
1468 case Primitive::kPrimLong: {
1469 locations->SetInAt(0, Location::RequiresRegister());
1470 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1471 break;
1472 }
1473
1474 case Primitive::kPrimFloat:
1475 case Primitive::kPrimDouble:
1476 locations->SetInAt(0, Location::RequiresFpuRegister());
1477 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1478 break;
1479
1480 default:
1481 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1482 }
1483}
1484
1485void InstructionCodeGeneratorARMVIXL::VisitNeg(HNeg* neg) {
1486 LocationSummary* locations = neg->GetLocations();
1487 Location out = locations->Out();
1488 Location in = locations->InAt(0);
1489 switch (neg->GetResultType()) {
1490 case Primitive::kPrimInt:
1491 __ Rsb(OutputRegister(neg), InputRegisterAt(neg, 0), 0);
1492 break;
1493
1494 case Primitive::kPrimLong:
1495 // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1496 __ Rsbs(LowRegisterFrom(out), LowRegisterFrom(in), 0);
1497 // We cannot emit an RSC (Reverse Subtract with Carry)
1498 // instruction here, as it does not exist in the Thumb-2
1499 // instruction set. We use the following approach
1500 // using SBC and SUB instead.
1501 //
1502      // out.hi = C - 1, i.e. minus the borrow out of the subtraction above
1503 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(out));
1504 // out.hi = out.hi - in.hi
1505 __ Sub(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(in));
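      // Illustrative example: negating 0x00000001_00000000 (= 2^32):
      //   Rsbs: out.lo = 0 - 0 = 0, no borrow so C = 1
      //   Sbc:  out.hi = C - 1 = 0
      //   Sub:  out.hi = 0 - 1 = 0xFFFFFFFF, giving 0xFFFFFFFF_00000000 = -2^32.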
1506 break;
1507
1508 case Primitive::kPrimFloat:
1509 case Primitive::kPrimDouble:
1510 __ Vneg(OutputVRegister(neg), InputVRegisterAt(neg, 0));
1511 break;
1512
1513 default:
1514 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1515 }
1516}
1517
Scott Wakelingfe885462016-09-22 10:24:38 +01001518void LocationsBuilderARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
1519 Primitive::Type result_type = conversion->GetResultType();
1520 Primitive::Type input_type = conversion->GetInputType();
1521 DCHECK_NE(result_type, input_type);
1522
1523 // The float-to-long, double-to-long and long-to-float type conversions
1524 // rely on a call to the runtime.
1525 LocationSummary::CallKind call_kind =
1526 (((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1527 && result_type == Primitive::kPrimLong)
1528 || (input_type == Primitive::kPrimLong && result_type == Primitive::kPrimFloat))
1529 ? LocationSummary::kCallOnMainOnly
1530 : LocationSummary::kNoCall;
1531 LocationSummary* locations =
1532 new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1533
1534 // The Java language does not allow treating boolean as an integral type but
1535 // our bit representation makes it safe.
1536
1537 switch (result_type) {
1538 case Primitive::kPrimByte:
1539 switch (input_type) {
1540 case Primitive::kPrimLong:
1541 // Type conversion from long to byte is a result of code transformations.
1542 case Primitive::kPrimBoolean:
1543 // Boolean input is a result of code transformations.
1544 case Primitive::kPrimShort:
1545 case Primitive::kPrimInt:
1546 case Primitive::kPrimChar:
1547 // Processing a Dex `int-to-byte' instruction.
1548 locations->SetInAt(0, Location::RequiresRegister());
1549 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1550 break;
1551
1552 default:
1553 LOG(FATAL) << "Unexpected type conversion from " << input_type
1554 << " to " << result_type;
1555 }
1556 break;
1557
1558 case Primitive::kPrimShort:
1559 switch (input_type) {
1560 case Primitive::kPrimLong:
1561 // Type conversion from long to short is a result of code transformations.
1562 case Primitive::kPrimBoolean:
1563 // Boolean input is a result of code transformations.
1564 case Primitive::kPrimByte:
1565 case Primitive::kPrimInt:
1566 case Primitive::kPrimChar:
1567 // Processing a Dex `int-to-short' instruction.
1568 locations->SetInAt(0, Location::RequiresRegister());
1569 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1570 break;
1571
1572 default:
1573 LOG(FATAL) << "Unexpected type conversion from " << input_type
1574 << " to " << result_type;
1575 }
1576 break;
1577
1578 case Primitive::kPrimInt:
1579 switch (input_type) {
1580 case Primitive::kPrimLong:
1581 // Processing a Dex `long-to-int' instruction.
1582 locations->SetInAt(0, Location::Any());
1583 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1584 break;
1585
1586 case Primitive::kPrimFloat:
1587 // Processing a Dex `float-to-int' instruction.
1588 locations->SetInAt(0, Location::RequiresFpuRegister());
1589 locations->SetOut(Location::RequiresRegister());
1590 locations->AddTemp(Location::RequiresFpuRegister());
1591 break;
1592
1593 case Primitive::kPrimDouble:
1594 // Processing a Dex `double-to-int' instruction.
1595 locations->SetInAt(0, Location::RequiresFpuRegister());
1596 locations->SetOut(Location::RequiresRegister());
1597 locations->AddTemp(Location::RequiresFpuRegister());
1598 break;
1599
1600 default:
1601 LOG(FATAL) << "Unexpected type conversion from " << input_type
1602 << " to " << result_type;
1603 }
1604 break;
1605
1606 case Primitive::kPrimLong:
1607 switch (input_type) {
1608 case Primitive::kPrimBoolean:
1609 // Boolean input is a result of code transformations.
1610 case Primitive::kPrimByte:
1611 case Primitive::kPrimShort:
1612 case Primitive::kPrimInt:
1613 case Primitive::kPrimChar:
1614 // Processing a Dex `int-to-long' instruction.
1615 locations->SetInAt(0, Location::RequiresRegister());
1616 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1617 break;
1618
1619 case Primitive::kPrimFloat: {
1620 // Processing a Dex `float-to-long' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001621 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1622 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
1623 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01001624 break;
1625 }
1626
1627 case Primitive::kPrimDouble: {
1628 // Processing a Dex `double-to-long' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001629 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1630 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0),
1631 calling_convention.GetFpuRegisterAt(1)));
1632 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01001633 break;
1634 }
1635
1636 default:
1637 LOG(FATAL) << "Unexpected type conversion from " << input_type
1638 << " to " << result_type;
1639 }
1640 break;
1641
1642 case Primitive::kPrimChar:
1643 switch (input_type) {
1644 case Primitive::kPrimLong:
1645 // Type conversion from long to char is a result of code transformations.
1646 case Primitive::kPrimBoolean:
1647 // Boolean input is a result of code transformations.
1648 case Primitive::kPrimByte:
1649 case Primitive::kPrimShort:
1650 case Primitive::kPrimInt:
1651 // Processing a Dex `int-to-char' instruction.
1652 locations->SetInAt(0, Location::RequiresRegister());
1653 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1654 break;
1655
1656 default:
1657 LOG(FATAL) << "Unexpected type conversion from " << input_type
1658 << " to " << result_type;
1659 }
1660 break;
1661
1662 case Primitive::kPrimFloat:
1663 switch (input_type) {
1664 case Primitive::kPrimBoolean:
1665 // Boolean input is a result of code transformations.
1666 case Primitive::kPrimByte:
1667 case Primitive::kPrimShort:
1668 case Primitive::kPrimInt:
1669 case Primitive::kPrimChar:
1670 // Processing a Dex `int-to-float' instruction.
1671 locations->SetInAt(0, Location::RequiresRegister());
1672 locations->SetOut(Location::RequiresFpuRegister());
1673 break;
1674
1675 case Primitive::kPrimLong: {
1676 // Processing a Dex `long-to-float' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001677 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1678 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0),
1679 calling_convention.GetRegisterAt(1)));
1680 locations->SetOut(LocationFrom(calling_convention.GetFpuRegisterAt(0)));
Scott Wakelingfe885462016-09-22 10:24:38 +01001681 break;
1682 }
1683
1684 case Primitive::kPrimDouble:
1685 // Processing a Dex `double-to-float' instruction.
1686 locations->SetInAt(0, Location::RequiresFpuRegister());
1687 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1688 break;
1689
1690 default:
1691 LOG(FATAL) << "Unexpected type conversion from " << input_type
1692 << " to " << result_type;
1693      }
1694 break;
1695
1696 case Primitive::kPrimDouble:
1697 switch (input_type) {
1698 case Primitive::kPrimBoolean:
1699 // Boolean input is a result of code transformations.
1700 case Primitive::kPrimByte:
1701 case Primitive::kPrimShort:
1702 case Primitive::kPrimInt:
1703 case Primitive::kPrimChar:
1704 // Processing a Dex `int-to-double' instruction.
1705 locations->SetInAt(0, Location::RequiresRegister());
1706 locations->SetOut(Location::RequiresFpuRegister());
1707 break;
1708
1709 case Primitive::kPrimLong:
1710 // Processing a Dex `long-to-double' instruction.
1711 locations->SetInAt(0, Location::RequiresRegister());
1712 locations->SetOut(Location::RequiresFpuRegister());
1713 locations->AddTemp(Location::RequiresFpuRegister());
1714 locations->AddTemp(Location::RequiresFpuRegister());
1715 break;
1716
1717 case Primitive::kPrimFloat:
1718 // Processing a Dex `float-to-double' instruction.
1719 locations->SetInAt(0, Location::RequiresFpuRegister());
1720 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1721 break;
1722
1723 default:
1724 LOG(FATAL) << "Unexpected type conversion from " << input_type
1725 << " to " << result_type;
1726      }
1727 break;
1728
1729 default:
1730 LOG(FATAL) << "Unexpected type conversion from " << input_type
1731 << " to " << result_type;
1732 }
1733}
1734
1735void InstructionCodeGeneratorARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
1736 LocationSummary* locations = conversion->GetLocations();
1737 Location out = locations->Out();
1738 Location in = locations->InAt(0);
1739 Primitive::Type result_type = conversion->GetResultType();
1740 Primitive::Type input_type = conversion->GetInputType();
1741 DCHECK_NE(result_type, input_type);
1742 switch (result_type) {
1743 case Primitive::kPrimByte:
1744 switch (input_type) {
1745 case Primitive::kPrimLong:
1746 // Type conversion from long to byte is a result of code transformations.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001747 __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 8);
Scott Wakelingfe885462016-09-22 10:24:38 +01001748 break;
1749 case Primitive::kPrimBoolean:
1750 // Boolean input is a result of code transformations.
1751 case Primitive::kPrimShort:
1752 case Primitive::kPrimInt:
1753 case Primitive::kPrimChar:
1754 // Processing a Dex `int-to-byte' instruction.
1755 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 8);
1756 break;
1757
1758 default:
1759 LOG(FATAL) << "Unexpected type conversion from " << input_type
1760 << " to " << result_type;
1761 }
1762 break;
1763
1764 case Primitive::kPrimShort:
1765 switch (input_type) {
1766 case Primitive::kPrimLong:
1767 // Type conversion from long to short is a result of code transformations.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001768 __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
Scott Wakelingfe885462016-09-22 10:24:38 +01001769 break;
1770 case Primitive::kPrimBoolean:
1771 // Boolean input is a result of code transformations.
1772 case Primitive::kPrimByte:
1773 case Primitive::kPrimInt:
1774 case Primitive::kPrimChar:
1775 // Processing a Dex `int-to-short' instruction.
1776 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
1777 break;
1778
1779 default:
1780 LOG(FATAL) << "Unexpected type conversion from " << input_type
1781 << " to " << result_type;
1782 }
1783 break;
1784
1785 case Primitive::kPrimInt:
1786 switch (input_type) {
1787 case Primitive::kPrimLong:
1788 // Processing a Dex `long-to-int' instruction.
1789 DCHECK(out.IsRegister());
1790 if (in.IsRegisterPair()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001791 __ Mov(OutputRegister(conversion), LowRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01001792 } else if (in.IsDoubleStackSlot()) {
1793 GetAssembler()->LoadFromOffset(kLoadWord,
1794 OutputRegister(conversion),
1795 sp,
1796 in.GetStackIndex());
1797 } else {
1798 DCHECK(in.IsConstant());
1799 DCHECK(in.GetConstant()->IsLongConstant());
1800 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1801 __ Mov(OutputRegister(conversion), static_cast<int32_t>(value));
1802 }
1803 break;
1804
1805 case Primitive::kPrimFloat: {
1806 // Processing a Dex `float-to-int' instruction.
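        // Note: the plain (non-R) VCVT encoding used below rounds toward zero and
        // saturates on overflow, with NaN converting to 0, which lines up with Java's
        // float-to-int narrowing rules, so no extra fix-up or slow path is needed here.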
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001807 vixl32::SRegister temp = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingfe885462016-09-22 10:24:38 +01001808 __ Vcvt(I32, F32, temp, InputSRegisterAt(conversion, 0));
1809 __ Vmov(OutputRegister(conversion), temp);
1810 break;
1811 }
1812
1813 case Primitive::kPrimDouble: {
1814 // Processing a Dex `double-to-int' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001815 vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingc34dba72016-10-03 10:14:44 +01001816 __ Vcvt(I32, F64, temp_s, DRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01001817 __ Vmov(OutputRegister(conversion), temp_s);
1818 break;
1819 }
1820
1821 default:
1822 LOG(FATAL) << "Unexpected type conversion from " << input_type
1823 << " to " << result_type;
1824 }
1825 break;
1826
1827 case Primitive::kPrimLong:
1828 switch (input_type) {
1829 case Primitive::kPrimBoolean:
1830 // Boolean input is a result of code transformations.
1831 case Primitive::kPrimByte:
1832 case Primitive::kPrimShort:
1833 case Primitive::kPrimInt:
1834 case Primitive::kPrimChar:
1835 // Processing a Dex `int-to-long' instruction.
1836 DCHECK(out.IsRegisterPair());
1837 DCHECK(in.IsRegister());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001838 __ Mov(LowRegisterFrom(out), InputRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01001839 // Sign extension.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001840 __ Asr(HighRegisterFrom(out), LowRegisterFrom(out), 31);
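      // e.g. converting the int -5: out.lo = 0xFFFFFFFB and the arithmetic shift
      // replicates the sign bit, so out.hi = 0xFFFFFFFF.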
Scott Wakelingfe885462016-09-22 10:24:38 +01001841 break;
1842
1843 case Primitive::kPrimFloat:
1844 // Processing a Dex `float-to-long' instruction.
1845 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
1846 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
1847 break;
1848
1849 case Primitive::kPrimDouble:
1850 // Processing a Dex `double-to-long' instruction.
1851 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
1852 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
1853 break;
1854
1855 default:
1856 LOG(FATAL) << "Unexpected type conversion from " << input_type
1857 << " to " << result_type;
1858 }
1859 break;
1860
1861 case Primitive::kPrimChar:
1862 switch (input_type) {
1863 case Primitive::kPrimLong:
1864 // Type conversion from long to char is a result of code transformations.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001865 __ Ubfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
Scott Wakelingfe885462016-09-22 10:24:38 +01001866 break;
1867 case Primitive::kPrimBoolean:
1868 // Boolean input is a result of code transformations.
1869 case Primitive::kPrimByte:
1870 case Primitive::kPrimShort:
1871 case Primitive::kPrimInt:
1872 // Processing a Dex `int-to-char' instruction.
1873 __ Ubfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
1874 break;
1875
1876 default:
1877 LOG(FATAL) << "Unexpected type conversion from " << input_type
1878 << " to " << result_type;
1879 }
1880 break;
1881
1882 case Primitive::kPrimFloat:
1883 switch (input_type) {
1884 case Primitive::kPrimBoolean:
1885 // Boolean input is a result of code transformations.
1886 case Primitive::kPrimByte:
1887 case Primitive::kPrimShort:
1888 case Primitive::kPrimInt:
1889 case Primitive::kPrimChar: {
1890 // Processing a Dex `int-to-float' instruction.
1891 __ Vmov(OutputSRegister(conversion), InputRegisterAt(conversion, 0));
1892 __ Vcvt(F32, I32, OutputSRegister(conversion), OutputSRegister(conversion));
1893 break;
1894 }
1895
1896 case Primitive::kPrimLong:
1897 // Processing a Dex `long-to-float' instruction.
1898 codegen_->InvokeRuntime(kQuickL2f, conversion, conversion->GetDexPc());
1899 CheckEntrypointTypes<kQuickL2f, float, int64_t>();
1900 break;
1901
1902 case Primitive::kPrimDouble:
1903 // Processing a Dex `double-to-float' instruction.
Scott Wakelingc34dba72016-10-03 10:14:44 +01001904 __ Vcvt(F32, F64, OutputSRegister(conversion), DRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01001905 break;
1906
1907 default:
1908 LOG(FATAL) << "Unexpected type conversion from " << input_type
1909 << " to " << result_type;
1910      }
1911 break;
1912
1913 case Primitive::kPrimDouble:
1914 switch (input_type) {
1915 case Primitive::kPrimBoolean:
1916 // Boolean input is a result of code transformations.
1917 case Primitive::kPrimByte:
1918 case Primitive::kPrimShort:
1919 case Primitive::kPrimInt:
1920 case Primitive::kPrimChar: {
1921 // Processing a Dex `int-to-double' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001922 __ Vmov(LowSRegisterFrom(out), InputRegisterAt(conversion, 0));
Scott Wakelingc34dba72016-10-03 10:14:44 +01001923 __ Vcvt(F64, I32, DRegisterFrom(out), LowSRegisterFrom(out));
Scott Wakelingfe885462016-09-22 10:24:38 +01001924 break;
1925 }
1926
1927 case Primitive::kPrimLong: {
1928 // Processing a Dex `long-to-double' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001929 vixl32::Register low = LowRegisterFrom(in);
1930 vixl32::Register high = HighRegisterFrom(in);
Scott Wakelingfe885462016-09-22 10:24:38 +01001931
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001932 vixl32::SRegister out_s = LowSRegisterFrom(out);
Scott Wakelingc34dba72016-10-03 10:14:44 +01001933 vixl32::DRegister out_d = DRegisterFrom(out);
Scott Wakelingfe885462016-09-22 10:24:38 +01001934
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001935 vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingc34dba72016-10-03 10:14:44 +01001936 vixl32::DRegister temp_d = DRegisterFrom(locations->GetTemp(0));
Scott Wakelingfe885462016-09-22 10:24:38 +01001937
Scott Wakelingc34dba72016-10-03 10:14:44 +01001938        vixl32::DRegister constant_d = DRegisterFrom(locations->GetTemp(1));
Scott Wakelingfe885462016-09-22 10:24:38 +01001939
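        // The sequence below computes, in double arithmetic,
        //   out_d = (double) high * 2^32 + (double) (uint32_t) low
        // e.g. for in = 0xFFFFFFFF_00000005: (-1.0) * 2^32 + 5.0 = -4294967291.0.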
1940 // temp_d = int-to-double(high)
1941 __ Vmov(temp_s, high);
1942 __ Vcvt(F64, I32, temp_d, temp_s);
1943 // constant_d = k2Pow32EncodingForDouble
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001944 __ Vmov(constant_d, bit_cast<double, int64_t>(k2Pow32EncodingForDouble));
Scott Wakelingfe885462016-09-22 10:24:38 +01001945 // out_d = unsigned-to-double(low)
1946 __ Vmov(out_s, low);
1947 __ Vcvt(F64, U32, out_d, out_s);
1948 // out_d += temp_d * constant_d
1949 __ Vmla(F64, out_d, temp_d, constant_d);
1950 break;
1951 }
1952
1953 case Primitive::kPrimFloat:
1954 // Processing a Dex `float-to-double' instruction.
Scott Wakelingc34dba72016-10-03 10:14:44 +01001955 __ Vcvt(F64, F32, DRegisterFrom(out), InputSRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01001956 break;
1957
1958 default:
1959 LOG(FATAL) << "Unexpected type conversion from " << input_type
1960 << " to " << result_type;
1961      }
1962 break;
1963
1964 default:
1965 LOG(FATAL) << "Unexpected type conversion from " << input_type
1966 << " to " << result_type;
1967 }
1968}
1969
1970void LocationsBuilderARMVIXL::VisitAdd(HAdd* add) {
1971 LocationSummary* locations =
1972 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1973 switch (add->GetResultType()) {
1974 case Primitive::kPrimInt: {
1975 locations->SetInAt(0, Location::RequiresRegister());
1976 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1977 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1978 break;
1979 }
1980
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001981 // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
Scott Wakelingfe885462016-09-22 10:24:38 +01001982 case Primitive::kPrimLong: {
1983 locations->SetInAt(0, Location::RequiresRegister());
1984 locations->SetInAt(1, Location::RequiresRegister());
1985 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1986 break;
1987 }
1988
1989 case Primitive::kPrimFloat:
1990 case Primitive::kPrimDouble: {
1991 locations->SetInAt(0, Location::RequiresFpuRegister());
1992 locations->SetInAt(1, Location::RequiresFpuRegister());
1993 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1994 break;
1995 }
1996
1997 default:
1998 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1999 }
2000}
2001
2002void InstructionCodeGeneratorARMVIXL::VisitAdd(HAdd* add) {
2003 LocationSummary* locations = add->GetLocations();
2004 Location out = locations->Out();
2005 Location first = locations->InAt(0);
2006 Location second = locations->InAt(1);
2007
2008 switch (add->GetResultType()) {
2009 case Primitive::kPrimInt: {
2010 __ Add(OutputRegister(add), InputRegisterAt(add, 0), InputOperandAt(add, 1));
2011 }
2012 break;
2013
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002014 // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
Scott Wakelingfe885462016-09-22 10:24:38 +01002015 case Primitive::kPrimLong: {
2016 DCHECK(second.IsRegisterPair());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002017 __ Adds(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
2018 __ Adc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
Scott Wakelingfe885462016-09-22 10:24:38 +01002019 break;
2020 }
2021
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002022 case Primitive::kPrimFloat:
Scott Wakelingfe885462016-09-22 10:24:38 +01002023 case Primitive::kPrimDouble:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002024 __ Vadd(OutputVRegister(add), InputVRegisterAt(add, 0), InputVRegisterAt(add, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002025 break;
2026
2027 default:
2028 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
2029 }
2030}
2031
2032void LocationsBuilderARMVIXL::VisitSub(HSub* sub) {
2033 LocationSummary* locations =
2034 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
2035 switch (sub->GetResultType()) {
2036 case Primitive::kPrimInt: {
2037 locations->SetInAt(0, Location::RequiresRegister());
2038 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
2039 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2040 break;
2041 }
2042
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002043 // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
Scott Wakelingfe885462016-09-22 10:24:38 +01002044 case Primitive::kPrimLong: {
2045 locations->SetInAt(0, Location::RequiresRegister());
2046 locations->SetInAt(1, Location::RequiresRegister());
2047 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2048 break;
2049 }
2050 case Primitive::kPrimFloat:
2051 case Primitive::kPrimDouble: {
2052 locations->SetInAt(0, Location::RequiresFpuRegister());
2053 locations->SetInAt(1, Location::RequiresFpuRegister());
2054 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2055 break;
2056 }
2057 default:
2058 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
2059 }
2060}
2061
2062void InstructionCodeGeneratorARMVIXL::VisitSub(HSub* sub) {
2063 LocationSummary* locations = sub->GetLocations();
2064 Location out = locations->Out();
2065 Location first = locations->InAt(0);
2066 Location second = locations->InAt(1);
2067 switch (sub->GetResultType()) {
2068 case Primitive::kPrimInt: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002069 __ Sub(OutputRegister(sub), InputRegisterAt(sub, 0), InputOperandAt(sub, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002070 break;
2071 }
2072
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002073 // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
Scott Wakelingfe885462016-09-22 10:24:38 +01002074 case Primitive::kPrimLong: {
2075 DCHECK(second.IsRegisterPair());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002076 __ Subs(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
2077 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
Scott Wakelingfe885462016-09-22 10:24:38 +01002078 break;
2079 }
2080
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002081 case Primitive::kPrimFloat:
2082 case Primitive::kPrimDouble:
2083 __ Vsub(OutputVRegister(sub), InputVRegisterAt(sub, 0), InputVRegisterAt(sub, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002084 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01002085
2086 default:
2087 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
2088 }
2089}
2090
2091void LocationsBuilderARMVIXL::VisitMul(HMul* mul) {
2092 LocationSummary* locations =
2093 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2094 switch (mul->GetResultType()) {
2095 case Primitive::kPrimInt:
2096 case Primitive::kPrimLong: {
2097 locations->SetInAt(0, Location::RequiresRegister());
2098 locations->SetInAt(1, Location::RequiresRegister());
2099 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2100 break;
2101 }
2102
2103 case Primitive::kPrimFloat:
2104 case Primitive::kPrimDouble: {
2105 locations->SetInAt(0, Location::RequiresFpuRegister());
2106 locations->SetInAt(1, Location::RequiresFpuRegister());
2107 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2108 break;
2109 }
2110
2111 default:
2112 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2113 }
2114}
2115
2116void InstructionCodeGeneratorARMVIXL::VisitMul(HMul* mul) {
2117 LocationSummary* locations = mul->GetLocations();
2118 Location out = locations->Out();
2119 Location first = locations->InAt(0);
2120 Location second = locations->InAt(1);
2121 switch (mul->GetResultType()) {
2122 case Primitive::kPrimInt: {
2123 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2124 break;
2125 }
2126 case Primitive::kPrimLong: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002127 vixl32::Register out_hi = HighRegisterFrom(out);
2128 vixl32::Register out_lo = LowRegisterFrom(out);
2129 vixl32::Register in1_hi = HighRegisterFrom(first);
2130 vixl32::Register in1_lo = LowRegisterFrom(first);
2131 vixl32::Register in2_hi = HighRegisterFrom(second);
2132 vixl32::Register in2_lo = LowRegisterFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01002133
2134      // Extra checks to protect against overlaps caused by the existence of the R1_R2 pair.
2135 // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
2136 // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
2137 DCHECK_NE(out_hi.GetCode(), in1_lo.GetCode());
2138 DCHECK_NE(out_hi.GetCode(), in2_lo.GetCode());
2139
2140 // input: in1 - 64 bits, in2 - 64 bits
2141 // output: out
2142      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
2143 // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
2144 // parts: out.lo = (in1.lo * in2.lo)[31:0]
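      // Each cross term is scaled by 2^32, so only its low 32 bits can land inside the
      // 64-bit result; anything above bit 63 is discarded, which is why 32-bit MUL/MLA
      // suffice for the high word and only UMULL needs the full 64-bit product.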
2145
2146 UseScratchRegisterScope temps(GetVIXLAssembler());
2147 vixl32::Register temp = temps.Acquire();
2148 // temp <- in1.lo * in2.hi
2149 __ Mul(temp, in1_lo, in2_hi);
2150 // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
2151 __ Mla(out_hi, in1_hi, in2_lo, temp);
2152 // out.lo <- (in1.lo * in2.lo)[31:0];
2153 __ Umull(out_lo, temp, in1_lo, in2_lo);
2154 // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002155 __ Add(out_hi, out_hi, temp);
Scott Wakelingfe885462016-09-22 10:24:38 +01002156 break;
2157 }
2158
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002159 case Primitive::kPrimFloat:
2160 case Primitive::kPrimDouble:
2161 __ Vmul(OutputVRegister(mul), InputVRegisterAt(mul, 0), InputVRegisterAt(mul, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002162 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01002163
2164 default:
2165 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2166 }
2167}
2168
Scott Wakelingfe885462016-09-22 10:24:38 +01002169void InstructionCodeGeneratorARMVIXL::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2170 DCHECK(instruction->IsDiv() || instruction->IsRem());
2171 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2172
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002173 Location second = instruction->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01002174 DCHECK(second.IsConstant());
2175
2176 vixl32::Register out = OutputRegister(instruction);
2177 vixl32::Register dividend = InputRegisterAt(instruction, 0);
2178 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2179 DCHECK(imm == 1 || imm == -1);
2180
2181 if (instruction->IsRem()) {
2182 __ Mov(out, 0);
2183 } else {
2184 if (imm == 1) {
2185 __ Mov(out, dividend);
2186 } else {
2187 __ Rsb(out, dividend, 0);
2188 }
2189 }
2190}
2191
2192void InstructionCodeGeneratorARMVIXL::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2193 DCHECK(instruction->IsDiv() || instruction->IsRem());
2194 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2195
2196 LocationSummary* locations = instruction->GetLocations();
2197 Location second = locations->InAt(1);
2198 DCHECK(second.IsConstant());
2199
2200 vixl32::Register out = OutputRegister(instruction);
2201 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002202 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
Scott Wakelingfe885462016-09-22 10:24:38 +01002203 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2204 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
2205 int ctz_imm = CTZ(abs_imm);
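  // The bias computed below makes the arithmetic shift round toward zero, as Java
  // requires: a negative dividend first gets (2^ctz_imm - 1) added to it.
  // Illustrative example for imm == 4 (ctz_imm == 2) and dividend == -7:
  //   temp = (-7 >> 31) = 0xFFFFFFFF, temp >>> 30 = 3, out = -7 + 3 = -4
  //   Div: -4 >> 2 = -1                   (Java: -7 / 4 == -1)
  //   Rem: (-4 & 0x3) - 3 = 0 - 3 = -3    (Java: -7 % 4 == -3)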
2206
2207 if (ctz_imm == 1) {
2208 __ Lsr(temp, dividend, 32 - ctz_imm);
2209 } else {
2210 __ Asr(temp, dividend, 31);
2211 __ Lsr(temp, temp, 32 - ctz_imm);
2212 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002213 __ Add(out, temp, dividend);
Scott Wakelingfe885462016-09-22 10:24:38 +01002214
2215 if (instruction->IsDiv()) {
2216 __ Asr(out, out, ctz_imm);
2217 if (imm < 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002218 __ Rsb(out, out, 0);
Scott Wakelingfe885462016-09-22 10:24:38 +01002219 }
2220 } else {
2221 __ Ubfx(out, out, 0, ctz_imm);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002222 __ Sub(out, out, temp);
Scott Wakelingfe885462016-09-22 10:24:38 +01002223 }
2224}
2225
2226void InstructionCodeGeneratorARMVIXL::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2227 DCHECK(instruction->IsDiv() || instruction->IsRem());
2228 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2229
2230 LocationSummary* locations = instruction->GetLocations();
2231 Location second = locations->InAt(1);
2232 DCHECK(second.IsConstant());
2233
2234 vixl32::Register out = OutputRegister(instruction);
2235 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002236 vixl32::Register temp1 = RegisterFrom(locations->GetTemp(0));
2237 vixl32::Register temp2 = RegisterFrom(locations->GetTemp(1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002238 int64_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2239
2240 int64_t magic;
2241 int shift;
2242 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
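  // This follows the usual signed "magic number" division: the quotient is roughly
  //   (dividend * magic) >> (32 + shift),
  // where SMULL below provides the high 32 bits of the 64-bit product. When `imm` and
  // `magic` have opposite signs, the dividend is added to or subtracted from that high
  // word as a correction, and the final `Sub(out, temp1, temp1 ASR 31)` adds one for
  // negative intermediates so the result rounds toward zero.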
2243
2244 __ Mov(temp1, magic);
2245 __ Smull(temp2, temp1, dividend, temp1);
2246
2247 if (imm > 0 && magic < 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002248 __ Add(temp1, temp1, dividend);
Scott Wakelingfe885462016-09-22 10:24:38 +01002249 } else if (imm < 0 && magic > 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002250 __ Sub(temp1, temp1, dividend);
Scott Wakelingfe885462016-09-22 10:24:38 +01002251 }
2252
2253 if (shift != 0) {
2254 __ Asr(temp1, temp1, shift);
2255 }
2256
2257 if (instruction->IsDiv()) {
2258 __ Sub(out, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
2259 } else {
2260 __ Sub(temp1, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
2261 // TODO: Strength reduction for mls.
2262 __ Mov(temp2, imm);
2263 __ Mls(out, temp1, temp2, dividend);
2264 }
2265}
2266
2267void InstructionCodeGeneratorARMVIXL::GenerateDivRemConstantIntegral(
2268 HBinaryOperation* instruction) {
2269 DCHECK(instruction->IsDiv() || instruction->IsRem());
2270 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2271
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002272 Location second = instruction->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01002273 DCHECK(second.IsConstant());
2274
2275 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2276 if (imm == 0) {
2277 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
2278 } else if (imm == 1 || imm == -1) {
2279 DivRemOneOrMinusOne(instruction);
2280 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
2281 DivRemByPowerOfTwo(instruction);
2282 } else {
2283 DCHECK(imm <= -2 || imm >= 2);
2284 GenerateDivRemWithAnyConstant(instruction);
2285 }
2286}
2287
2288void LocationsBuilderARMVIXL::VisitDiv(HDiv* div) {
2289 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2290 if (div->GetResultType() == Primitive::kPrimLong) {
2291 // pLdiv runtime call.
2292 call_kind = LocationSummary::kCallOnMainOnly;
2293 } else if (div->GetResultType() == Primitive::kPrimInt && div->InputAt(1)->IsConstant()) {
2294 // sdiv will be replaced by other instruction sequence.
2295 } else if (div->GetResultType() == Primitive::kPrimInt &&
2296 !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2297 // pIdivmod runtime call.
2298 call_kind = LocationSummary::kCallOnMainOnly;
2299 }
2300
2301 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2302
2303 switch (div->GetResultType()) {
2304 case Primitive::kPrimInt: {
2305 if (div->InputAt(1)->IsConstant()) {
2306 locations->SetInAt(0, Location::RequiresRegister());
2307 locations->SetInAt(1, Location::ConstantLocation(div->InputAt(1)->AsConstant()));
2308 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2309 int32_t value = div->InputAt(1)->AsIntConstant()->GetValue();
2310 if (value == 1 || value == 0 || value == -1) {
2311 // No temp register required.
2312 } else {
2313 locations->AddTemp(Location::RequiresRegister());
2314 if (!IsPowerOfTwo(AbsOrMin(value))) {
2315 locations->AddTemp(Location::RequiresRegister());
2316 }
2317 }
2318 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2319 locations->SetInAt(0, Location::RequiresRegister());
2320 locations->SetInAt(1, Location::RequiresRegister());
2321 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2322 } else {
2323 TODO_VIXL32(FATAL);
2324 }
2325 break;
2326 }
2327 case Primitive::kPrimLong: {
2328 TODO_VIXL32(FATAL);
2329 break;
2330 }
2331 case Primitive::kPrimFloat:
2332 case Primitive::kPrimDouble: {
2333 locations->SetInAt(0, Location::RequiresFpuRegister());
2334 locations->SetInAt(1, Location::RequiresFpuRegister());
2335 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2336 break;
2337 }
2338
2339 default:
2340 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2341 }
2342}
2343
2344void InstructionCodeGeneratorARMVIXL::VisitDiv(HDiv* div) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002345 Location rhs = div->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01002346
2347 switch (div->GetResultType()) {
2348 case Primitive::kPrimInt: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002349 if (rhs.IsConstant()) {
Scott Wakelingfe885462016-09-22 10:24:38 +01002350 GenerateDivRemConstantIntegral(div);
2351 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2352 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
2353 } else {
2354 TODO_VIXL32(FATAL);
2355 }
2356 break;
2357 }
2358
2359 case Primitive::kPrimLong: {
2360 TODO_VIXL32(FATAL);
2361 break;
2362 }
2363
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002364 case Primitive::kPrimFloat:
2365 case Primitive::kPrimDouble:
2366 __ Vdiv(OutputVRegister(div), InputVRegisterAt(div, 0), InputVRegisterAt(div, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002367 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01002368
2369 default:
2370 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2371 }
2372}
2373
2374void LocationsBuilderARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002375 // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/
Scott Wakelingfe885462016-09-22 10:24:38 +01002376 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2377 ? LocationSummary::kCallOnSlowPath
2378 : LocationSummary::kNoCall;
2379 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
2380 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2381 if (instruction->HasUses()) {
2382 locations->SetOut(Location::SameAsFirstInput());
2383 }
2384}
2385
2386void InstructionCodeGeneratorARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2387 DivZeroCheckSlowPathARMVIXL* slow_path =
2388 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARMVIXL(instruction);
2389 codegen_->AddSlowPath(slow_path);
2390
2391 LocationSummary* locations = instruction->GetLocations();
2392 Location value = locations->InAt(0);
2393
2394 switch (instruction->GetType()) {
2395 case Primitive::kPrimBoolean:
2396 case Primitive::kPrimByte:
2397 case Primitive::kPrimChar:
2398 case Primitive::kPrimShort:
2399 case Primitive::kPrimInt: {
2400 if (value.IsRegister()) {
2401 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2402 } else {
2403 DCHECK(value.IsConstant()) << value;
2404 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2405 __ B(slow_path->GetEntryLabel());
2406 }
2407 }
2408 break;
2409 }
2410 case Primitive::kPrimLong: {
2411 if (value.IsRegisterPair()) {
2412 UseScratchRegisterScope temps(GetVIXLAssembler());
2413 vixl32::Register temp = temps.Acquire();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002414 __ Orrs(temp, LowRegisterFrom(value), HighRegisterFrom(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01002415 __ B(eq, slow_path->GetEntryLabel());
2416 } else {
2417 DCHECK(value.IsConstant()) << value;
2418 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2419 __ B(slow_path->GetEntryLabel());
2420 }
2421 }
2422 break;
2423 }
2424 default:
2425 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2426 }
2427}
2428
Artem Serov02109dd2016-09-23 17:17:54 +01002429void InstructionCodeGeneratorARMVIXL::HandleIntegerRotate(HRor* ror) {
2430 LocationSummary* locations = ror->GetLocations();
2431 vixl32::Register in = InputRegisterAt(ror, 0);
2432 Location rhs = locations->InAt(1);
2433 vixl32::Register out = OutputRegister(ror);
2434
2435 if (rhs.IsConstant()) {
2436 // Arm32 and Thumb2 assemblers require a rotation on the interval [1,31],
2437    // so map all rotations to a positive equivalent in that range.
2438    // (e.g. a rotation left *or* right by -2 bits equals a rotation by 30 bits in the same direction.)
2439 uint32_t rot = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()) & 0x1F;
2440 if (rot) {
2441 // Rotate, mapping left rotations to right equivalents if necessary.
2442 // (e.g. left by 2 bits == right by 30.)
2443 __ Ror(out, in, rot);
2444 } else if (!out.Is(in)) {
2445 __ Mov(out, in);
2446 }
2447 } else {
2448 __ Ror(out, in, RegisterFrom(rhs));
2449 }
2450}
2451
2452// Gain some speed by mapping all Long rotates onto equivalent pairs of Integer
2453// rotates by swapping input regs (effectively rotating by the first 32-bits of
2454// a larger rotation) or flipping direction (thus treating larger right/left
2455// rotations as sub-word sized rotations in the other direction) as appropriate.
2456void InstructionCodeGeneratorARMVIXL::HandleLongRotate(HRor* ror) {
2457 LocationSummary* locations = ror->GetLocations();
2458 vixl32::Register in_reg_lo = LowRegisterFrom(locations->InAt(0));
2459 vixl32::Register in_reg_hi = HighRegisterFrom(locations->InAt(0));
2460 Location rhs = locations->InAt(1);
2461 vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
2462 vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());
2463
2464 if (rhs.IsConstant()) {
2465 uint64_t rot = CodeGenerator::GetInt64ValueOf(rhs.GetConstant());
2466    // Map all rotations to positive equivalents on the interval [0,63].
2467 rot &= kMaxLongShiftDistance;
2468 // For rotates over a word in size, 'pre-rotate' by 32-bits to keep rotate
2469 // logic below to a simple pair of binary orr.
2470 // (e.g. 34 bits == in_reg swap + 2 bits right.)
2471 if (rot >= kArmBitsPerWord) {
2472 rot -= kArmBitsPerWord;
2473 std::swap(in_reg_hi, in_reg_lo);
2474 }
2475 // Rotate, or mov to out for zero or word size rotations.
2476 if (rot != 0u) {
2477 __ Lsr(out_reg_hi, in_reg_hi, rot);
2478 __ Orr(out_reg_hi, out_reg_hi, Operand(in_reg_lo, ShiftType::LSL, kArmBitsPerWord - rot));
2479 __ Lsr(out_reg_lo, in_reg_lo, rot);
2480 __ Orr(out_reg_lo, out_reg_lo, Operand(in_reg_hi, ShiftType::LSL, kArmBitsPerWord - rot));
2481 } else {
2482 __ Mov(out_reg_lo, in_reg_lo);
2483 __ Mov(out_reg_hi, in_reg_hi);
2484 }
2485 } else {
2486 vixl32::Register shift_right = RegisterFrom(locations->GetTemp(0));
2487 vixl32::Register shift_left = RegisterFrom(locations->GetTemp(1));
2488 vixl32::Label end;
2489 vixl32::Label shift_by_32_plus_shift_right;
2490
2491 __ And(shift_right, RegisterFrom(rhs), 0x1F);
2492 __ Lsrs(shift_left, RegisterFrom(rhs), 6);
2493    // TODO(VIXL): Check that flags are kept after "vixl32::LeaveFlags" is enabled.
2494 __ Rsb(shift_left, shift_right, kArmBitsPerWord);
2495 __ B(cc, &shift_by_32_plus_shift_right);
2496
2497 // out_reg_hi = (reg_hi << shift_left) | (reg_lo >> shift_right).
2498 // out_reg_lo = (reg_lo << shift_left) | (reg_hi >> shift_right).
2499 __ Lsl(out_reg_hi, in_reg_hi, shift_left);
2500 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
2501 __ Add(out_reg_hi, out_reg_hi, out_reg_lo);
2502 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
2503 __ Lsr(shift_left, in_reg_hi, shift_right);
2504 __ Add(out_reg_lo, out_reg_lo, shift_left);
2505 __ B(&end);
2506
2507 __ Bind(&shift_by_32_plus_shift_right); // Shift by 32+shift_right.
2508 // out_reg_hi = (reg_hi >> shift_right) | (reg_lo << shift_left).
2509 // out_reg_lo = (reg_lo >> shift_right) | (reg_hi << shift_left).
2510 __ Lsr(out_reg_hi, in_reg_hi, shift_right);
2511 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
2512 __ Add(out_reg_hi, out_reg_hi, out_reg_lo);
2513 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
2514 __ Lsl(shift_right, in_reg_hi, shift_left);
2515 __ Add(out_reg_lo, out_reg_lo, shift_right);
2516
2517 __ Bind(&end);
2518 }
2519}
2520
2521void LocationsBuilderARMVIXL::VisitRor(HRor* ror) {
2522 LocationSummary* locations =
2523 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
2524 switch (ror->GetResultType()) {
2525 case Primitive::kPrimInt: {
2526 locations->SetInAt(0, Location::RequiresRegister());
2527 locations->SetInAt(1, Location::RegisterOrConstant(ror->InputAt(1)));
2528 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2529 break;
2530 }
2531 case Primitive::kPrimLong: {
2532 locations->SetInAt(0, Location::RequiresRegister());
2533 if (ror->InputAt(1)->IsConstant()) {
2534 locations->SetInAt(1, Location::ConstantLocation(ror->InputAt(1)->AsConstant()));
2535 } else {
2536 locations->SetInAt(1, Location::RequiresRegister());
2537 locations->AddTemp(Location::RequiresRegister());
2538 locations->AddTemp(Location::RequiresRegister());
2539 }
2540 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2541 break;
2542 }
2543 default:
2544 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
2545 }
2546}
2547
2548void InstructionCodeGeneratorARMVIXL::VisitRor(HRor* ror) {
2549 Primitive::Type type = ror->GetResultType();
2550 switch (type) {
2551 case Primitive::kPrimInt: {
2552 HandleIntegerRotate(ror);
2553 break;
2554 }
2555 case Primitive::kPrimLong: {
2556 HandleLongRotate(ror);
2557 break;
2558 }
2559 default:
2560 LOG(FATAL) << "Unexpected operation type " << type;
2561 UNREACHABLE();
2562 }
2563}
2564
Artem Serov02d37832016-10-25 15:25:33 +01002565void LocationsBuilderARMVIXL::HandleShift(HBinaryOperation* op) {
2566 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2567
2568 LocationSummary* locations =
2569 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
2570
2571 switch (op->GetResultType()) {
2572 case Primitive::kPrimInt: {
2573 locations->SetInAt(0, Location::RequiresRegister());
2574 if (op->InputAt(1)->IsConstant()) {
2575 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
2576 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2577 } else {
2578 locations->SetInAt(1, Location::RequiresRegister());
2579 // Make the output overlap, as it will be used to hold the masked
2580 // second input.
2581 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2582 }
2583 break;
2584 }
2585 case Primitive::kPrimLong: {
2586 locations->SetInAt(0, Location::RequiresRegister());
2587 if (op->InputAt(1)->IsConstant()) {
2588 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
2589 // For simplicity, use kOutputOverlap even though we only require that low registers
2590 // don't clash with high registers which the register allocator currently guarantees.
2591 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2592 } else {
2593 locations->SetInAt(1, Location::RequiresRegister());
2594 locations->AddTemp(Location::RequiresRegister());
2595 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2596 }
2597 break;
2598 }
2599 default:
2600 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2601 }
2602}
2603
2604void InstructionCodeGeneratorARMVIXL::HandleShift(HBinaryOperation* op) {
2605 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2606
2607 LocationSummary* locations = op->GetLocations();
2608 Location out = locations->Out();
2609 Location first = locations->InAt(0);
2610 Location second = locations->InAt(1);
2611
2612 Primitive::Type type = op->GetResultType();
2613 switch (type) {
2614 case Primitive::kPrimInt: {
2615 vixl32::Register out_reg = OutputRegister(op);
2616 vixl32::Register first_reg = InputRegisterAt(op, 0);
2617 if (second.IsRegister()) {
2618 vixl32::Register second_reg = RegisterFrom(second);
2619 // ARM doesn't mask the shift count so we need to do it ourselves.
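        // (Java only uses the low five bits of an int shift count, e.g.
        //  (x << 33) == (x << 1), hence the explicit mask.)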
2620 __ And(out_reg, second_reg, kMaxIntShiftDistance);
2621 if (op->IsShl()) {
2622 __ Lsl(out_reg, first_reg, out_reg);
2623 } else if (op->IsShr()) {
2624 __ Asr(out_reg, first_reg, out_reg);
2625 } else {
2626 __ Lsr(out_reg, first_reg, out_reg);
2627 }
2628 } else {
2629 int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
2630 uint32_t shift_value = cst & kMaxIntShiftDistance;
2631 if (shift_value == 0) { // ARM does not support shifting with 0 immediate.
2632 __ Mov(out_reg, first_reg);
2633 } else if (op->IsShl()) {
2634 __ Lsl(out_reg, first_reg, shift_value);
2635 } else if (op->IsShr()) {
2636 __ Asr(out_reg, first_reg, shift_value);
2637 } else {
2638 __ Lsr(out_reg, first_reg, shift_value);
2639 }
2640 }
2641 break;
2642 }
2643 case Primitive::kPrimLong: {
2644 vixl32::Register o_h = HighRegisterFrom(out);
2645 vixl32::Register o_l = LowRegisterFrom(out);
2646
2647 vixl32::Register high = HighRegisterFrom(first);
2648 vixl32::Register low = LowRegisterFrom(first);
2649
2650 if (second.IsRegister()) {
2651 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
2652
2653 vixl32::Register second_reg = RegisterFrom(second);
2654
2655 if (op->IsShl()) {
2656 __ And(o_l, second_reg, kMaxLongShiftDistance);
2657 // Shift the high part
2658 __ Lsl(o_h, high, o_l);
2659          // Shift the low part and `or` what overflowed onto the high part.
2660 __ Rsb(temp, o_l, kArmBitsPerWord);
2661 __ Lsr(temp, low, temp);
2662 __ Orr(o_h, o_h, temp);
2663 // If the shift is > 32 bits, override the high part
2664 __ Subs(temp, o_l, kArmBitsPerWord);
2665 {
2666 AssemblerAccurateScope guard(GetVIXLAssembler(),
2667 3 * kArmInstrMaxSizeInBytes,
2668 CodeBufferCheckScope::kMaximumSize);
2669 __ it(pl);
2670 __ lsl(pl, o_h, low, temp);
2671 }
2672 // Shift the low part
2673 __ Lsl(o_l, low, o_l);
2674 } else if (op->IsShr()) {
2675 __ And(o_h, second_reg, kMaxLongShiftDistance);
2676 // Shift the low part
2677 __ Lsr(o_l, low, o_h);
2678          // Shift the high part and `or` what underflowed onto the low part.
2679 __ Rsb(temp, o_h, kArmBitsPerWord);
2680 __ Lsl(temp, high, temp);
2681 __ Orr(o_l, o_l, temp);
2682 // If the shift is > 32 bits, override the low part
2683 __ Subs(temp, o_h, kArmBitsPerWord);
2684 {
2685 AssemblerAccurateScope guard(GetVIXLAssembler(),
2686 3 * kArmInstrMaxSizeInBytes,
2687 CodeBufferCheckScope::kMaximumSize);
2688 __ it(pl);
2689 __ asr(pl, o_l, high, temp);
2690 }
2691 // Shift the high part
2692 __ Asr(o_h, high, o_h);
2693 } else {
2694 __ And(o_h, second_reg, kMaxLongShiftDistance);
2695 // same as Shr except we use `Lsr`s and not `Asr`s
2696 __ Lsr(o_l, low, o_h);
2697 __ Rsb(temp, o_h, kArmBitsPerWord);
2698 __ Lsl(temp, high, temp);
2699 __ Orr(o_l, o_l, temp);
2700 __ Subs(temp, o_h, kArmBitsPerWord);
2701 {
2702 AssemblerAccurateScope guard(GetVIXLAssembler(),
2703 3 * kArmInstrMaxSizeInBytes,
2704 CodeBufferCheckScope::kMaximumSize);
2705 __ it(pl);
2706 __ lsr(pl, o_l, high, temp);
2707 }
2708 __ Lsr(o_h, high, o_h);
2709 }
2710 } else {
2711 // Register allocator doesn't create partial overlap.
2712 DCHECK(!o_l.Is(high));
2713 DCHECK(!o_h.Is(low));
2714 int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
2715 uint32_t shift_value = cst & kMaxLongShiftDistance;
2716 if (shift_value > 32) {
2717 if (op->IsShl()) {
2718 __ Lsl(o_h, low, shift_value - 32);
2719 __ Mov(o_l, 0);
2720 } else if (op->IsShr()) {
2721 __ Asr(o_l, high, shift_value - 32);
2722 __ Asr(o_h, high, 31);
2723 } else {
2724 __ Lsr(o_l, high, shift_value - 32);
2725 __ Mov(o_h, 0);
2726 }
2727 } else if (shift_value == 32) {
2728 if (op->IsShl()) {
2729 __ Mov(o_h, low);
2730 __ Mov(o_l, 0);
2731 } else if (op->IsShr()) {
2732 __ Mov(o_l, high);
2733 __ Asr(o_h, high, 31);
2734 } else {
2735 __ Mov(o_l, high);
2736 __ Mov(o_h, 0);
2737 }
2738 } else if (shift_value == 1) {
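          // A 64-bit shift by one can reuse the carry flag: the flag-setting shift moves
          // the bit crossing the word boundary into C, then ADC doubles the high word and
          // adds that bit in, while RRX shifts right by one inserting C into bit 31.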
2739 if (op->IsShl()) {
2740 __ Lsls(o_l, low, 1);
2741 __ Adc(o_h, high, high);
2742 } else if (op->IsShr()) {
2743 __ Asrs(o_h, high, 1);
2744 __ Rrx(o_l, low);
2745 } else {
2746 __ Lsrs(o_h, high, 1);
2747 __ Rrx(o_l, low);
2748 }
2749 } else {
2750 DCHECK(2 <= shift_value && shift_value < 32) << shift_value;
2751 if (op->IsShl()) {
2752 __ Lsl(o_h, high, shift_value);
2753 __ Orr(o_h, o_h, Operand(low, ShiftType::LSR, 32 - shift_value));
2754 __ Lsl(o_l, low, shift_value);
2755 } else if (op->IsShr()) {
2756 __ Lsr(o_l, low, shift_value);
2757 __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value));
2758 __ Asr(o_h, high, shift_value);
2759 } else {
2760 __ Lsr(o_l, low, shift_value);
2761 __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value));
2762 __ Lsr(o_h, high, shift_value);
2763 }
2764 }
2765 }
2766 break;
2767 }
2768 default:
2769 LOG(FATAL) << "Unexpected operation type " << type;
2770 UNREACHABLE();
2771 }
2772}
2773
2774void LocationsBuilderARMVIXL::VisitShl(HShl* shl) {
2775 HandleShift(shl);
2776}
2777
2778void InstructionCodeGeneratorARMVIXL::VisitShl(HShl* shl) {
2779 HandleShift(shl);
2780}
2781
2782void LocationsBuilderARMVIXL::VisitShr(HShr* shr) {
2783 HandleShift(shr);
2784}
2785
2786void InstructionCodeGeneratorARMVIXL::VisitShr(HShr* shr) {
2787 HandleShift(shr);
2788}
2789
2790void LocationsBuilderARMVIXL::VisitUShr(HUShr* ushr) {
2791 HandleShift(ushr);
2792}
2793
2794void InstructionCodeGeneratorARMVIXL::VisitUShr(HUShr* ushr) {
2795 HandleShift(ushr);
2796}
2797
2798void LocationsBuilderARMVIXL::VisitNewInstance(HNewInstance* instruction) {
2799 LocationSummary* locations =
2800 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
2801 if (instruction->IsStringAlloc()) {
2802 locations->AddTemp(LocationFrom(kMethodRegister));
2803 } else {
2804 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2805 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2806 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
2807 }
2808 locations->SetOut(LocationFrom(r0));
2809}
2810
2811void InstructionCodeGeneratorARMVIXL::VisitNewInstance(HNewInstance* instruction) {
2812  // Note: if heap poisoning is enabled, the entry point takes care
2813 // of poisoning the reference.
2814 if (instruction->IsStringAlloc()) {
2815 // String is allocated through StringFactory. Call NewEmptyString entry point.
2816 vixl32::Register temp = RegisterFrom(instruction->GetLocations()->GetTemp(0));
2817 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize);
2818 GetAssembler()->LoadFromOffset(kLoadWord, temp, tr, QUICK_ENTRY_POINT(pNewEmptyString));
2819 GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, code_offset.Int32Value());
2820 AssemblerAccurateScope aas(GetVIXLAssembler(),
2821 kArmInstrMaxSizeInBytes,
2822 CodeBufferCheckScope::kMaximumSize);
2823 __ blx(lr);
2824 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2825 } else {
2826 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
2827 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
2828 }
2829}
2830
2831void LocationsBuilderARMVIXL::VisitNewArray(HNewArray* instruction) {
2832 LocationSummary* locations =
2833 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
2834 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2835 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2836 locations->SetOut(LocationFrom(r0));
2837 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
2838 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
2839}
2840
2841void InstructionCodeGeneratorARMVIXL::VisitNewArray(HNewArray* instruction) {
2842 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2843 __ Mov(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2844  // Note: if heap poisoning is enabled, the entry point takes care
2845 // of poisoning the reference.
2846 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
2847 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
2848}
2849
2850void LocationsBuilderARMVIXL::VisitParameterValue(HParameterValue* instruction) {
2851 LocationSummary* locations =
2852 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2853 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2854 if (location.IsStackSlot()) {
2855 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2856 } else if (location.IsDoubleStackSlot()) {
2857 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2858 }
2859 locations->SetOut(location);
2860}
2861
2862void InstructionCodeGeneratorARMVIXL::VisitParameterValue(
2863 HParameterValue* instruction ATTRIBUTE_UNUSED) {
2864 // Nothing to do, the parameter is already at its location.
2865}
2866
2867void LocationsBuilderARMVIXL::VisitCurrentMethod(HCurrentMethod* instruction) {
2868 LocationSummary* locations =
2869 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2870 locations->SetOut(LocationFrom(kMethodRegister));
2871}
2872
2873void InstructionCodeGeneratorARMVIXL::VisitCurrentMethod(
2874 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
2875 // Nothing to do, the method is already at its location.
2876}
2877
2878void LocationsBuilderARMVIXL::VisitNot(HNot* not_) {
2879 LocationSummary* locations =
2880 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2881 locations->SetInAt(0, Location::RequiresRegister());
2882 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2883}
2884
2885void InstructionCodeGeneratorARMVIXL::VisitNot(HNot* not_) {
2886 LocationSummary* locations = not_->GetLocations();
2887 Location out = locations->Out();
2888 Location in = locations->InAt(0);
2889 switch (not_->GetResultType()) {
2890 case Primitive::kPrimInt:
2891 __ Mvn(OutputRegister(not_), InputRegisterAt(not_, 0));
2892 break;
2893
2894 case Primitive::kPrimLong:
2895 __ Mvn(LowRegisterFrom(out), LowRegisterFrom(in));
2896 __ Mvn(HighRegisterFrom(out), HighRegisterFrom(in));
2897 break;
2898
2899 default:
2900 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2901 }
2902}
2903
Scott Wakelingc34dba72016-10-03 10:14:44 +01002904void LocationsBuilderARMVIXL::VisitBooleanNot(HBooleanNot* bool_not) {
2905 LocationSummary* locations =
2906 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
2907 locations->SetInAt(0, Location::RequiresRegister());
2908 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2909}
2910
2911void InstructionCodeGeneratorARMVIXL::VisitBooleanNot(HBooleanNot* bool_not) {
2912 __ Eor(OutputRegister(bool_not), InputRegister(bool_not), 1);
2913}
2914
Artem Serov02d37832016-10-25 15:25:33 +01002915void LocationsBuilderARMVIXL::VisitCompare(HCompare* compare) {
2916 LocationSummary* locations =
2917 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2918 switch (compare->InputAt(0)->GetType()) {
2919 case Primitive::kPrimBoolean:
2920 case Primitive::kPrimByte:
2921 case Primitive::kPrimShort:
2922 case Primitive::kPrimChar:
2923 case Primitive::kPrimInt:
2924 case Primitive::kPrimLong: {
2925 locations->SetInAt(0, Location::RequiresRegister());
2926 locations->SetInAt(1, Location::RequiresRegister());
2927 // Output overlaps because it is written before doing the low comparison.
2928 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2929 break;
2930 }
2931 case Primitive::kPrimFloat:
2932 case Primitive::kPrimDouble: {
2933 locations->SetInAt(0, Location::RequiresFpuRegister());
2934 locations->SetInAt(1, ArithmeticZeroOrFpuRegister(compare->InputAt(1)));
2935 locations->SetOut(Location::RequiresRegister());
2936 break;
2937 }
2938 default:
2939 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2940 }
2941}
2942
2943void InstructionCodeGeneratorARMVIXL::VisitCompare(HCompare* compare) {
2944 LocationSummary* locations = compare->GetLocations();
2945 vixl32::Register out = OutputRegister(compare);
2946 Location left = locations->InAt(0);
2947 Location right = locations->InAt(1);
2948
2949 vixl32::Label less, greater, done;
2950 Primitive::Type type = compare->InputAt(0)->GetType();
2951 vixl32::Condition less_cond = vixl32::Condition(kNone);
2952 switch (type) {
2953 case Primitive::kPrimBoolean:
2954 case Primitive::kPrimByte:
2955 case Primitive::kPrimShort:
2956 case Primitive::kPrimChar:
2957 case Primitive::kPrimInt: {
2958 // Emit move to `out` before the `Cmp`, as `Mov` might affect the status flags.
2959 __ Mov(out, 0);
2960 __ Cmp(RegisterFrom(left), RegisterFrom(right)); // Signed compare.
2961 less_cond = lt;
2962 break;
2963 }
2964 case Primitive::kPrimLong: {
2965 __ Cmp(HighRegisterFrom(left), HighRegisterFrom(right)); // Signed compare.
2966 __ B(lt, &less);
2967 __ B(gt, &greater);
2968 // Emit move to `out` before the last `Cmp`, as `Mov` might affect the status flags.
2969 __ Mov(out, 0);
2970 __ Cmp(LowRegisterFrom(left), LowRegisterFrom(right)); // Unsigned compare.
2971 less_cond = lo;
2972 break;
2973 }
2974 case Primitive::kPrimFloat:
2975 case Primitive::kPrimDouble: {
2976 __ Mov(out, 0);
2977 GenerateVcmp(compare);
2978 // To branch on the FP compare result we transfer FPSCR to APSR (encoded as PC in VMRS).
2979 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
2980 less_cond = ARMFPCondition(kCondLT, compare->IsGtBias());
2981 break;
2982 }
2983 default:
2984 LOG(FATAL) << "Unexpected compare type " << type;
2985 UNREACHABLE();
2986 }
2987
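// `out` was pre-set to 0 above; fall through to `done` only when the operands compare
// equal. Otherwise select +1 (greater) or -1 (less) based on the condition computed for
// the input type.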
2988 __ B(eq, &done);
2989 __ B(less_cond, &less);
2990
2991 __ Bind(&greater);
2992 __ Mov(out, 1);
2993 __ B(&done);
2994
2995 __ Bind(&less);
2996 __ Mov(out, -1);
2997
2998 __ Bind(&done);
2999}
3000
3001void LocationsBuilderARMVIXL::VisitPhi(HPhi* instruction) {
3002 LocationSummary* locations =
3003 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3004 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
3005 locations->SetInAt(i, Location::Any());
3006 }
3007 locations->SetOut(Location::Any());
3008}
3009
3010void InstructionCodeGeneratorARMVIXL::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
3011 LOG(FATAL) << "Unreachable";
3012}
3013
3014void CodeGeneratorARMVIXL::GenerateMemoryBarrier(MemBarrierKind kind) {
3015 // TODO (ported from quick): revisit ARM barrier kinds.
3016 DmbOptions flavor = DmbOptions::ISH; // Quiet C++ warnings.
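// kAnyStore, kLoadAny and kAnyAny all lower to a full inner-shareable barrier (DMB ISH);
// kStoreStore can use the cheaper store-only variant (DMB ISHST).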
3017 switch (kind) {
3018 case MemBarrierKind::kAnyStore:
3019 case MemBarrierKind::kLoadAny:
3020 case MemBarrierKind::kAnyAny: {
3021 flavor = DmbOptions::ISH;
3022 break;
3023 }
3024 case MemBarrierKind::kStoreStore: {
3025 flavor = DmbOptions::ISHST;
3026 break;
3027 }
3028 default:
3029 LOG(FATAL) << "Unexpected memory barrier " << kind;
3030 }
3031 __ Dmb(flavor);
3032}
3033
3034void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicLoad(vixl32::Register addr,
3035 uint32_t offset,
3036 vixl32::Register out_lo,
3037 vixl32::Register out_hi) {
3038 UseScratchRegisterScope temps(GetVIXLAssembler());
3039 if (offset != 0) {
3040 vixl32::Register temp = temps.Acquire();
3041 __ Add(temp, addr, offset);
3042 addr = temp;
3043 }
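// Ldrexd provides a single-copy atomic 64-bit load on ARMv7, which is what a volatile
// long/double read needs when plain ldrd is not guaranteed to be atomic on this core.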
3044 __ Ldrexd(out_lo, out_hi, addr);
3045}
3046
3047void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicStore(vixl32::Register addr,
3048 uint32_t offset,
3049 vixl32::Register value_lo,
3050 vixl32::Register value_hi,
3051 vixl32::Register temp1,
3052 vixl32::Register temp2,
3053 HInstruction* instruction) {
3054 UseScratchRegisterScope temps(GetVIXLAssembler());
3055 vixl32::Label fail;
3056 if (offset != 0) {
3057 vixl32::Register temp = temps.Acquire();
3058 __ Add(temp, addr, offset);
3059 addr = temp;
3060 }
3061 __ Bind(&fail);
3062  // We need a load followed by a store. (The address used in a STREX instruction must
3063 // be the same as the address in the most recently executed LDREX instruction.)
3064 __ Ldrexd(temp1, temp2, addr);
3065 codegen_->MaybeRecordImplicitNullCheck(instruction);
3066 __ Strexd(temp1, value_lo, value_hi, addr);
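// Strexd writes 0 to temp1 on success and 1 if the exclusive store failed; on failure,
// branch back and retry the whole ldrexd/strexd sequence.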
3067 __ Cbnz(temp1, &fail);
3068}
Artem Serov02109dd2016-09-23 17:17:54 +01003069
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003070void LocationsBuilderARMVIXL::HandleFieldSet(
3071 HInstruction* instruction, const FieldInfo& field_info) {
3072 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
3073
3074 LocationSummary* locations =
3075 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3076 locations->SetInAt(0, Location::RequiresRegister());
3077
3078 Primitive::Type field_type = field_info.GetFieldType();
3079 if (Primitive::IsFloatingPointType(field_type)) {
3080 locations->SetInAt(1, Location::RequiresFpuRegister());
3081 } else {
3082 locations->SetInAt(1, Location::RequiresRegister());
3083 }
3084
3085 bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
3086 bool generate_volatile = field_info.IsVolatile()
3087 && is_wide
3088 && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
3089 bool needs_write_barrier =
3090 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
3091 // Temporary registers for the write barrier.
3092 // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
3093 if (needs_write_barrier) {
3094 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
3095 locations->AddTemp(Location::RequiresRegister());
3096 } else if (generate_volatile) {
3097    // The ARM encoding has some additional constraints for ldrexd/strexd:
3098 // - registers need to be consecutive
3099 // - the first register should be even but not R14.
3100 // We don't test for ARM yet, and the assertion makes sure that we
3101 // revisit this if we ever enable ARM encoding.
3102 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
3103
3104 locations->AddTemp(Location::RequiresRegister());
3105 locations->AddTemp(Location::RequiresRegister());
3106 if (field_type == Primitive::kPrimDouble) {
3107 // For doubles we need two more registers to copy the value.
3108 locations->AddTemp(LocationFrom(r2));
3109 locations->AddTemp(LocationFrom(r3));
3110 }
3111 }
3112}
3113
3114void InstructionCodeGeneratorARMVIXL::HandleFieldSet(HInstruction* instruction,
3115 const FieldInfo& field_info,
3116 bool value_can_be_null) {
3117 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
3118
3119 LocationSummary* locations = instruction->GetLocations();
3120 vixl32::Register base = InputRegisterAt(instruction, 0);
3121 Location value = locations->InAt(1);
3122
3123 bool is_volatile = field_info.IsVolatile();
3124 bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
3125 Primitive::Type field_type = field_info.GetFieldType();
3126 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
3127 bool needs_write_barrier =
3128 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
3129
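// A volatile store is bracketed by barriers: an any-store barrier before the store and an
// any-any barrier after it, which is the fencing the Java memory model requires for a
// volatile write.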
3130 if (is_volatile) {
3131 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
3132 }
3133
3134 switch (field_type) {
3135 case Primitive::kPrimBoolean:
3136 case Primitive::kPrimByte: {
3137 GetAssembler()->StoreToOffset(kStoreByte, RegisterFrom(value), base, offset);
3138 break;
3139 }
3140
3141 case Primitive::kPrimShort:
3142 case Primitive::kPrimChar: {
3143 GetAssembler()->StoreToOffset(kStoreHalfword, RegisterFrom(value), base, offset);
3144 break;
3145 }
3146
3147 case Primitive::kPrimInt:
3148 case Primitive::kPrimNot: {
3149 if (kPoisonHeapReferences && needs_write_barrier) {
3150 // Note that in the case where `value` is a null reference,
3151 // we do not enter this block, as a null reference does not
3152 // need poisoning.
3153 DCHECK_EQ(field_type, Primitive::kPrimNot);
3154 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
3155 __ Mov(temp, RegisterFrom(value));
3156 GetAssembler()->PoisonHeapReference(temp);
3157 GetAssembler()->StoreToOffset(kStoreWord, temp, base, offset);
3158 } else {
3159 GetAssembler()->StoreToOffset(kStoreWord, RegisterFrom(value), base, offset);
3160 }
3161 break;
3162 }
3163
3164 case Primitive::kPrimLong: {
3165 if (is_volatile && !atomic_ldrd_strd) {
3166 GenerateWideAtomicStore(base,
3167 offset,
3168 LowRegisterFrom(value),
3169 HighRegisterFrom(value),
3170 RegisterFrom(locations->GetTemp(0)),
3171 RegisterFrom(locations->GetTemp(1)),
3172 instruction);
3173 } else {
3174 GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), base, offset);
3175 codegen_->MaybeRecordImplicitNullCheck(instruction);
3176 }
3177 break;
3178 }
3179
3180 case Primitive::kPrimFloat: {
3181 GetAssembler()->StoreSToOffset(SRegisterFrom(value), base, offset);
3182 break;
3183 }
3184
3185 case Primitive::kPrimDouble: {
Scott Wakelingc34dba72016-10-03 10:14:44 +01003186 vixl32::DRegister value_reg = DRegisterFrom(value);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003187 if (is_volatile && !atomic_ldrd_strd) {
3188 vixl32::Register value_reg_lo = RegisterFrom(locations->GetTemp(0));
3189 vixl32::Register value_reg_hi = RegisterFrom(locations->GetTemp(1));
3190
3191 __ Vmov(value_reg_lo, value_reg_hi, value_reg);
3192
3193 GenerateWideAtomicStore(base,
3194 offset,
3195 value_reg_lo,
3196 value_reg_hi,
3197 RegisterFrom(locations->GetTemp(2)),
3198 RegisterFrom(locations->GetTemp(3)),
3199 instruction);
3200 } else {
3201 GetAssembler()->StoreDToOffset(value_reg, base, offset);
3202 codegen_->MaybeRecordImplicitNullCheck(instruction);
3203 }
3204 break;
3205 }
3206
3207 case Primitive::kPrimVoid:
3208 LOG(FATAL) << "Unreachable type " << field_type;
3209 UNREACHABLE();
3210 }
3211
3212 // Longs and doubles are handled in the switch.
3213 if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
3214 codegen_->MaybeRecordImplicitNullCheck(instruction);
3215 }
3216
3217 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
3218 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
3219 vixl32::Register card = RegisterFrom(locations->GetTemp(1));
3220 codegen_->MarkGCCard(temp, card, base, RegisterFrom(value), value_can_be_null);
3221 }
3222
3223 if (is_volatile) {
3224 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
3225 }
3226}
3227
Artem Serov02d37832016-10-25 15:25:33 +01003228void LocationsBuilderARMVIXL::HandleFieldGet(HInstruction* instruction,
3229 const FieldInfo& field_info) {
3230 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
3231
3232 bool object_field_get_with_read_barrier =
3233 kEmitCompilerReadBarrier && (field_info.GetFieldType() == Primitive::kPrimNot);
3234 LocationSummary* locations =
3235 new (GetGraph()->GetArena()) LocationSummary(instruction,
3236 object_field_get_with_read_barrier ?
3237 LocationSummary::kCallOnSlowPath :
3238 LocationSummary::kNoCall);
3239 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
3240 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
3241 }
3242 locations->SetInAt(0, Location::RequiresRegister());
3243
3244 bool volatile_for_double = field_info.IsVolatile()
3245 && (field_info.GetFieldType() == Primitive::kPrimDouble)
3246 && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
3247  // The output overlaps in the case of a volatile long: we don't want the
3248 // code generated by GenerateWideAtomicLoad to overwrite the
3249 // object's location. Likewise, in the case of an object field get
3250 // with read barriers enabled, we do not want the load to overwrite
3251 // the object's location, as we need it to emit the read barrier.
3252 bool overlap = (field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong)) ||
3253 object_field_get_with_read_barrier;
3254
3255 if (Primitive::IsFloatingPointType(instruction->GetType())) {
3256 locations->SetOut(Location::RequiresFpuRegister());
3257 } else {
3258 locations->SetOut(Location::RequiresRegister(),
3259 (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
3260 }
3261 if (volatile_for_double) {
3262    // The ARM encoding has some additional constraints for ldrexd/strexd:
3263 // - registers need to be consecutive
3264 // - the first register should be even but not R14.
3265 // We don't test for ARM yet, and the assertion makes sure that we
3266 // revisit this if we ever enable ARM encoding.
3267 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
3268 locations->AddTemp(Location::RequiresRegister());
3269 locations->AddTemp(Location::RequiresRegister());
3270 } else if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
3271 // We need a temporary register for the read barrier marking slow
3272 // path in CodeGeneratorARM::GenerateFieldLoadWithBakerReadBarrier.
3273 locations->AddTemp(Location::RequiresRegister());
3274 }
3275}
3276
3277Location LocationsBuilderARMVIXL::ArithmeticZeroOrFpuRegister(HInstruction* input) {
3278 DCHECK(Primitive::IsFloatingPointType(input->GetType())) << input->GetType();
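// A literal +0.0/-0.0 can be compared with the immediate form of vcmp, so keep such a
// constant as a constant location instead of forcing it into an FPU register.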
3279 if ((input->IsFloatConstant() && (input->AsFloatConstant()->IsArithmeticZero())) ||
3280 (input->IsDoubleConstant() && (input->AsDoubleConstant()->IsArithmeticZero()))) {
3281 return Location::ConstantLocation(input->AsConstant());
3282 } else {
3283 return Location::RequiresFpuRegister();
3284 }
3285}
3286
Artem Serov02109dd2016-09-23 17:17:54 +01003287Location LocationsBuilderARMVIXL::ArmEncodableConstantOrRegister(HInstruction* constant,
3288 Opcode opcode) {
3289 DCHECK(!Primitive::IsFloatingPointType(constant->GetType()));
3290 if (constant->IsConstant() &&
3291 CanEncodeConstantAsImmediate(constant->AsConstant(), opcode)) {
3292 return Location::ConstantLocation(constant->AsConstant());
3293 }
3294 return Location::RequiresRegister();
3295}
3296
3297bool LocationsBuilderARMVIXL::CanEncodeConstantAsImmediate(HConstant* input_cst,
3298 Opcode opcode) {
3299 uint64_t value = static_cast<uint64_t>(Int64FromConstant(input_cst));
3300 if (Primitive::Is64BitType(input_cst->GetType())) {
3301 Opcode high_opcode = opcode;
3302 SetCc low_set_cc = kCcDontCare;
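// A 64-bit constant is encodable only if both 32-bit halves are. Logical ops check each
// half with the same opcode; ADD/SUB are first normalized to ADD, and then the low half
// must encode with flags set while the high half must encode as an ADC that consumes the
// carry.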
3303 switch (opcode) {
3304 case SUB:
3305 // Flip the operation to an ADD.
3306 value = -value;
3307 opcode = ADD;
3308 FALLTHROUGH_INTENDED;
3309 case ADD:
3310 if (Low32Bits(value) == 0u) {
3311 return CanEncodeConstantAsImmediate(High32Bits(value), opcode, kCcDontCare);
3312 }
3313 high_opcode = ADC;
3314 low_set_cc = kCcSet;
3315 break;
3316 default:
3317 break;
3318 }
3319 return CanEncodeConstantAsImmediate(Low32Bits(value), opcode, low_set_cc) &&
3320 CanEncodeConstantAsImmediate(High32Bits(value), high_opcode, kCcDontCare);
3321 } else {
3322 return CanEncodeConstantAsImmediate(Low32Bits(value), opcode);
3323 }
3324}
3325
3326// TODO(VIXL): Replace `art::arm::SetCc` with `vixl32::FlagsUpdate` after the flags-setting
3327// optimization is enabled.
3328bool LocationsBuilderARMVIXL::CanEncodeConstantAsImmediate(uint32_t value,
3329 Opcode opcode,
3330 SetCc set_cc) {
3331 ArmVIXLAssembler* assembler = codegen_->GetAssembler();
3332 if (assembler->ShifterOperandCanHold(opcode, value, set_cc)) {
3333 return true;
3334 }
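// If the constant does not fit as an immediate, try the complementary instruction with the
// bitwise-complemented or negated value, e.g. AND rd, rn, #imm <-> BIC rd, rn, #~imm.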
3335 Opcode neg_opcode = kNoOperand;
3336 switch (opcode) {
3337 case AND: neg_opcode = BIC; value = ~value; break;
3338 case ORR: neg_opcode = ORN; value = ~value; break;
3339 case ADD: neg_opcode = SUB; value = -value; break;
3340 case ADC: neg_opcode = SBC; value = ~value; break;
3341 case SUB: neg_opcode = ADD; value = -value; break;
3342 case SBC: neg_opcode = ADC; value = ~value; break;
3343 default:
3344 return false;
3345 }
3346 return assembler->ShifterOperandCanHold(neg_opcode, value, set_cc);
3347}
3348
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003349void InstructionCodeGeneratorARMVIXL::HandleFieldGet(HInstruction* instruction,
3350 const FieldInfo& field_info) {
3351 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
3352
3353 LocationSummary* locations = instruction->GetLocations();
3354 vixl32::Register base = InputRegisterAt(instruction, 0);
3355 Location out = locations->Out();
3356 bool is_volatile = field_info.IsVolatile();
3357 bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
3358 Primitive::Type field_type = field_info.GetFieldType();
3359 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
3360
3361 switch (field_type) {
3362 case Primitive::kPrimBoolean:
3363 GetAssembler()->LoadFromOffset(kLoadUnsignedByte, RegisterFrom(out), base, offset);
3364 break;
3365
3366 case Primitive::kPrimByte:
3367 GetAssembler()->LoadFromOffset(kLoadSignedByte, RegisterFrom(out), base, offset);
3368 break;
3369
3370 case Primitive::kPrimShort:
3371 GetAssembler()->LoadFromOffset(kLoadSignedHalfword, RegisterFrom(out), base, offset);
3372 break;
3373
3374 case Primitive::kPrimChar:
3375 GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, RegisterFrom(out), base, offset);
3376 break;
3377
3378 case Primitive::kPrimInt:
3379 GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
3380 break;
3381
3382 case Primitive::kPrimNot: {
3383 // /* HeapReference<Object> */ out = *(base + offset)
3384 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
3385 TODO_VIXL32(FATAL);
3386 } else {
3387 GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
3388        // TODO(VIXL): Use a scope to guarantee the position immediately after the load.
3389 codegen_->MaybeRecordImplicitNullCheck(instruction);
3390 if (is_volatile) {
3391 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
3392 }
3393 // If read barriers are enabled, emit read barriers other than
3394 // Baker's using a slow path (and also unpoison the loaded
3395 // reference, if heap poisoning is enabled).
3396 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, locations->InAt(0), offset);
3397 }
3398 break;
3399 }
3400
3401 case Primitive::kPrimLong:
3402 if (is_volatile && !atomic_ldrd_strd) {
3403 GenerateWideAtomicLoad(base, offset, LowRegisterFrom(out), HighRegisterFrom(out));
3404 } else {
3405 GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out), base, offset);
3406 }
3407 break;
3408
3409 case Primitive::kPrimFloat:
3410 GetAssembler()->LoadSFromOffset(SRegisterFrom(out), base, offset);
3411 break;
3412
3413 case Primitive::kPrimDouble: {
Scott Wakelingc34dba72016-10-03 10:14:44 +01003414 vixl32::DRegister out_dreg = DRegisterFrom(out);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003415 if (is_volatile && !atomic_ldrd_strd) {
3416 vixl32::Register lo = RegisterFrom(locations->GetTemp(0));
3417 vixl32::Register hi = RegisterFrom(locations->GetTemp(1));
3418 GenerateWideAtomicLoad(base, offset, lo, hi);
3419        // TODO(VIXL): Does this need to be positioned immediately after the ldrexd
3420        // instruction? If so, we need a scope.
3421 codegen_->MaybeRecordImplicitNullCheck(instruction);
3422 __ Vmov(out_dreg, lo, hi);
3423 } else {
3424 GetAssembler()->LoadDFromOffset(out_dreg, base, offset);
3425        // TODO(VIXL): Use a scope to guarantee the position immediately after the load.
3426 codegen_->MaybeRecordImplicitNullCheck(instruction);
3427 }
3428 break;
3429 }
3430
3431 case Primitive::kPrimVoid:
3432 LOG(FATAL) << "Unreachable type " << field_type;
3433 UNREACHABLE();
3434 }
3435
3436 if (field_type == Primitive::kPrimNot || field_type == Primitive::kPrimDouble) {
3437 // Potential implicit null checks, in the case of reference or
3438 // double fields, are handled in the previous switch statement.
3439 } else {
3440 // Address cases other than reference and double that may require an implicit null check.
3441 codegen_->MaybeRecordImplicitNullCheck(instruction);
3442 }
3443
3444 if (is_volatile) {
3445 if (field_type == Primitive::kPrimNot) {
3446 // Memory barriers, in the case of references, are also handled
3447 // in the previous switch statement.
3448 } else {
3449 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
3450 }
3451 }
3452}
3453
3454void LocationsBuilderARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3455 HandleFieldSet(instruction, instruction->GetFieldInfo());
3456}
3457
3458void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3459 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
3460}
3461
3462void LocationsBuilderARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3463 HandleFieldGet(instruction, instruction->GetFieldInfo());
3464}
3465
3466void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3467 HandleFieldGet(instruction, instruction->GetFieldInfo());
3468}
3469
3470void LocationsBuilderARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
3471 HandleFieldGet(instruction, instruction->GetFieldInfo());
3472}
3473
3474void InstructionCodeGeneratorARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
3475 HandleFieldGet(instruction, instruction->GetFieldInfo());
3476}
3477
Scott Wakelingc34dba72016-10-03 10:14:44 +01003478void LocationsBuilderARMVIXL::VisitStaticFieldSet(HStaticFieldSet* instruction) {
3479 HandleFieldSet(instruction, instruction->GetFieldInfo());
3480}
3481
3482void InstructionCodeGeneratorARMVIXL::VisitStaticFieldSet(HStaticFieldSet* instruction) {
3483 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
3484}
3485
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003486void LocationsBuilderARMVIXL::VisitNullCheck(HNullCheck* instruction) {
3487 // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/
3488 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3489 ? LocationSummary::kCallOnSlowPath
3490 : LocationSummary::kNoCall;
3491 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3492 locations->SetInAt(0, Location::RequiresRegister());
3493 if (instruction->HasUses()) {
3494 locations->SetOut(Location::SameAsFirstInput());
3495 }
3496}
3497
3498void CodeGeneratorARMVIXL::GenerateImplicitNullCheck(HNullCheck* instruction) {
3499 if (CanMoveNullCheckToUser(instruction)) {
3500 return;
3501 }
3502
3503 UseScratchRegisterScope temps(GetVIXLAssembler());
3504 AssemblerAccurateScope aas(GetVIXLAssembler(),
3505 kArmInstrMaxSizeInBytes,
3506 CodeBufferCheckScope::kMaximumSize);
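// The implicit null check is a plain load from [obj]: if obj is null the load faults, and
// the runtime's fault handler uses the PC info recorded below to throw the
// NullPointerException.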
3507 __ ldr(temps.Acquire(), MemOperand(InputRegisterAt(instruction, 0)));
3508 RecordPcInfo(instruction, instruction->GetDexPc());
3509}
3510
3511void CodeGeneratorARMVIXL::GenerateExplicitNullCheck(HNullCheck* instruction) {
3512 NullCheckSlowPathARMVIXL* slow_path =
3513 new (GetGraph()->GetArena()) NullCheckSlowPathARMVIXL(instruction);
3514 AddSlowPath(slow_path);
3515 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3516}
3517
3518void InstructionCodeGeneratorARMVIXL::VisitNullCheck(HNullCheck* instruction) {
3519 codegen_->GenerateNullCheck(instruction);
3520}
3521
Scott Wakelingc34dba72016-10-03 10:14:44 +01003522static LoadOperandType GetLoadOperandType(Primitive::Type type) {
3523 switch (type) {
3524 case Primitive::kPrimNot:
3525 return kLoadWord;
3526 case Primitive::kPrimBoolean:
3527 return kLoadUnsignedByte;
3528 case Primitive::kPrimByte:
3529 return kLoadSignedByte;
3530 case Primitive::kPrimChar:
3531 return kLoadUnsignedHalfword;
3532 case Primitive::kPrimShort:
3533 return kLoadSignedHalfword;
3534 case Primitive::kPrimInt:
3535 return kLoadWord;
3536 case Primitive::kPrimLong:
3537 return kLoadWordPair;
3538 case Primitive::kPrimFloat:
3539 return kLoadSWord;
3540 case Primitive::kPrimDouble:
3541 return kLoadDWord;
3542 default:
3543 LOG(FATAL) << "Unreachable type " << type;
3544 UNREACHABLE();
3545 }
3546}
3547
3548static StoreOperandType GetStoreOperandType(Primitive::Type type) {
3549 switch (type) {
3550 case Primitive::kPrimNot:
3551 return kStoreWord;
3552 case Primitive::kPrimBoolean:
3553 case Primitive::kPrimByte:
3554 return kStoreByte;
3555 case Primitive::kPrimChar:
3556 case Primitive::kPrimShort:
3557 return kStoreHalfword;
3558 case Primitive::kPrimInt:
3559 return kStoreWord;
3560 case Primitive::kPrimLong:
3561 return kStoreWordPair;
3562 case Primitive::kPrimFloat:
3563 return kStoreSWord;
3564 case Primitive::kPrimDouble:
3565 return kStoreDWord;
3566 default:
3567 LOG(FATAL) << "Unreachable type " << type;
3568 UNREACHABLE();
3569 }
3570}
3571
3572void CodeGeneratorARMVIXL::LoadFromShiftedRegOffset(Primitive::Type type,
3573 Location out_loc,
3574 vixl32::Register base,
3575 vixl32::Register reg_index,
3576 vixl32::Condition cond) {
3577 uint32_t shift_count = Primitive::ComponentSizeShift(type);
3578 MemOperand mem_address(base, reg_index, vixl32::LSL, shift_count);
3579
3580 switch (type) {
3581 case Primitive::kPrimByte:
3582 __ Ldrsb(cond, RegisterFrom(out_loc), mem_address);
3583 break;
3584 case Primitive::kPrimBoolean:
3585 __ Ldrb(cond, RegisterFrom(out_loc), mem_address);
3586 break;
3587 case Primitive::kPrimShort:
3588 __ Ldrsh(cond, RegisterFrom(out_loc), mem_address);
3589 break;
3590 case Primitive::kPrimChar:
3591 __ Ldrh(cond, RegisterFrom(out_loc), mem_address);
3592 break;
3593 case Primitive::kPrimNot:
3594 case Primitive::kPrimInt:
3595 __ Ldr(cond, RegisterFrom(out_loc), mem_address);
3596 break;
3597 // T32 doesn't support LoadFromShiftedRegOffset mem address mode for these types.
3598 case Primitive::kPrimLong:
3599 case Primitive::kPrimFloat:
3600 case Primitive::kPrimDouble:
3601 default:
3602 LOG(FATAL) << "Unreachable type " << type;
3603 UNREACHABLE();
3604 }
3605}
3606
3607void CodeGeneratorARMVIXL::StoreToShiftedRegOffset(Primitive::Type type,
3608 Location loc,
3609 vixl32::Register base,
3610 vixl32::Register reg_index,
3611 vixl32::Condition cond) {
3612 uint32_t shift_count = Primitive::ComponentSizeShift(type);
3613 MemOperand mem_address(base, reg_index, vixl32::LSL, shift_count);
3614
3615 switch (type) {
3616 case Primitive::kPrimByte:
3617 case Primitive::kPrimBoolean:
3618 __ Strb(cond, RegisterFrom(loc), mem_address);
3619 break;
3620 case Primitive::kPrimShort:
3621 case Primitive::kPrimChar:
3622 __ Strh(cond, RegisterFrom(loc), mem_address);
3623 break;
3624 case Primitive::kPrimNot:
3625 case Primitive::kPrimInt:
3626 __ Str(cond, RegisterFrom(loc), mem_address);
3627 break;
3628 // T32 doesn't support StoreToShiftedRegOffset mem address mode for these types.
3629 case Primitive::kPrimLong:
3630 case Primitive::kPrimFloat:
3631 case Primitive::kPrimDouble:
3632 default:
3633 LOG(FATAL) << "Unreachable type " << type;
3634 UNREACHABLE();
3635 }
3636}
3637
3638void LocationsBuilderARMVIXL::VisitArrayGet(HArrayGet* instruction) {
3639 bool object_array_get_with_read_barrier =
3640 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
3641 LocationSummary* locations =
3642 new (GetGraph()->GetArena()) LocationSummary(instruction,
3643 object_array_get_with_read_barrier ?
3644 LocationSummary::kCallOnSlowPath :
3645 LocationSummary::kNoCall);
3646 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
3647 TODO_VIXL32(FATAL);
3648 }
3649 locations->SetInAt(0, Location::RequiresRegister());
3650 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3651 if (Primitive::IsFloatingPointType(instruction->GetType())) {
3652 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3653 } else {
3654 // The output overlaps in the case of an object array get with
3655 // read barriers enabled: we do not want the move to overwrite the
3656 // array's location, as we need it to emit the read barrier.
3657 locations->SetOut(
3658 Location::RequiresRegister(),
3659 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
3660 }
3661 // We need a temporary register for the read barrier marking slow
3662 // path in CodeGeneratorARM::GenerateArrayLoadWithBakerReadBarrier.
3663  // Also needed for the String compression feature.
3664 if ((object_array_get_with_read_barrier && kUseBakerReadBarrier)
3665 || (mirror::kUseStringCompression && instruction->IsStringCharAt())) {
3666 TODO_VIXL32(FATAL);
3667 }
3668}
3669
3670void InstructionCodeGeneratorARMVIXL::VisitArrayGet(HArrayGet* instruction) {
3671 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
3672 LocationSummary* locations = instruction->GetLocations();
3673 Location obj_loc = locations->InAt(0);
3674 vixl32::Register obj = InputRegisterAt(instruction, 0);
3675 Location index = locations->InAt(1);
3676 Location out_loc = locations->Out();
3677 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
3678 Primitive::Type type = instruction->GetType();
3679 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
3680 instruction->IsStringCharAt();
3681 HInstruction* array_instr = instruction->GetArray();
3682 bool has_intermediate_address = array_instr->IsIntermediateAddress();
3683 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
3684 DCHECK(!(has_intermediate_address && kEmitCompilerReadBarrier));
3685
3686 switch (type) {
3687 case Primitive::kPrimBoolean:
3688 case Primitive::kPrimByte:
3689 case Primitive::kPrimShort:
3690 case Primitive::kPrimChar:
3691 case Primitive::kPrimInt: {
3692 if (index.IsConstant()) {
3693 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
3694 if (maybe_compressed_char_at) {
3695 TODO_VIXL32(FATAL);
3696 } else {
3697 uint32_t full_offset = data_offset + (const_index << Primitive::ComponentSizeShift(type));
3698
3699 LoadOperandType load_type = GetLoadOperandType(type);
3700 GetAssembler()->LoadFromOffset(load_type, RegisterFrom(out_loc), obj, full_offset);
3701 }
3702 } else {
3703 vixl32::Register temp = temps.Acquire();
3704
3705 if (has_intermediate_address) {
3706 TODO_VIXL32(FATAL);
3707 } else {
3708 __ Add(temp, obj, data_offset);
3709 }
3710 if (maybe_compressed_char_at) {
3711 TODO_VIXL32(FATAL);
3712 } else {
3713 codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
3714 }
3715 }
3716 break;
3717 }
3718
3719 case Primitive::kPrimNot: {
3720 static_assert(
3721 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
3722 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
3723 // /* HeapReference<Object> */ out =
3724 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
3725 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
3726 TODO_VIXL32(FATAL);
3727 } else {
3728 vixl32::Register out = OutputRegister(instruction);
3729 if (index.IsConstant()) {
3730 size_t offset =
3731 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3732 GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
3733 codegen_->MaybeRecordImplicitNullCheck(instruction);
3734 // If read barriers are enabled, emit read barriers other than
3735 // Baker's using a slow path (and also unpoison the loaded
3736 // reference, if heap poisoning is enabled).
3737 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
3738 } else {
3739 vixl32::Register temp = temps.Acquire();
3740
3741 if (has_intermediate_address) {
3742 TODO_VIXL32(FATAL);
3743 } else {
3744 __ Add(temp, obj, data_offset);
3745 }
3746 codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
3747
3748 codegen_->MaybeRecordImplicitNullCheck(instruction);
3749 // If read barriers are enabled, emit read barriers other than
3750 // Baker's using a slow path (and also unpoison the loaded
3751 // reference, if heap poisoning is enabled).
3752 codegen_->MaybeGenerateReadBarrierSlow(
3753 instruction, out_loc, out_loc, obj_loc, data_offset, index);
3754 }
3755 }
3756 break;
3757 }
3758
3759 case Primitive::kPrimLong: {
3760 if (index.IsConstant()) {
3761 size_t offset =
3762 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3763 GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out_loc), obj, offset);
3764 } else {
3765 vixl32::Register temp = temps.Acquire();
3766 __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
3767 GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out_loc), temp, data_offset);
3768 }
3769 break;
3770 }
3771
3772 case Primitive::kPrimFloat: {
3773 vixl32::SRegister out = SRegisterFrom(out_loc);
3774 if (index.IsConstant()) {
3775 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3776 GetAssembler()->LoadSFromOffset(out, obj, offset);
3777 } else {
3778 vixl32::Register temp = temps.Acquire();
3779 __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
3780 GetAssembler()->LoadSFromOffset(out, temp, data_offset);
3781 }
3782 break;
3783 }
3784
3785 case Primitive::kPrimDouble: {
3786 if (index.IsConstant()) {
3787 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3788 GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), obj, offset);
3789 } else {
3790 vixl32::Register temp = temps.Acquire();
3791 __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
3792 GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), temp, data_offset);
3793 }
3794 break;
3795 }
3796
3797 case Primitive::kPrimVoid:
3798 LOG(FATAL) << "Unreachable type " << type;
3799 UNREACHABLE();
3800 }
3801
3802 if (type == Primitive::kPrimNot) {
3803 // Potential implicit null checks, in the case of reference
3804 // arrays, are handled in the previous switch statement.
3805 } else if (!maybe_compressed_char_at) {
3806 codegen_->MaybeRecordImplicitNullCheck(instruction);
3807 }
3808}
3809
3810void LocationsBuilderARMVIXL::VisitArraySet(HArraySet* instruction) {
3811 Primitive::Type value_type = instruction->GetComponentType();
3812
3813 bool needs_write_barrier =
3814 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3815 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
3816
3817 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3818 instruction,
3819 may_need_runtime_call_for_type_check ?
3820 LocationSummary::kCallOnSlowPath :
3821 LocationSummary::kNoCall);
3822
3823 locations->SetInAt(0, Location::RequiresRegister());
3824 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3825 if (Primitive::IsFloatingPointType(value_type)) {
3826 locations->SetInAt(2, Location::RequiresFpuRegister());
3827 } else {
3828 locations->SetInAt(2, Location::RequiresRegister());
3829 }
3830 if (needs_write_barrier) {
3831 // Temporary registers for the write barrier.
3832 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
3833 locations->AddTemp(Location::RequiresRegister());
3834 }
3835}
3836
3837void InstructionCodeGeneratorARMVIXL::VisitArraySet(HArraySet* instruction) {
3838 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
3839 LocationSummary* locations = instruction->GetLocations();
3840 vixl32::Register array = InputRegisterAt(instruction, 0);
3841 Location index = locations->InAt(1);
3842 Primitive::Type value_type = instruction->GetComponentType();
3843 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
3844 bool needs_write_barrier =
3845 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3846 uint32_t data_offset =
3847 mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
3848 Location value_loc = locations->InAt(2);
3849 HInstruction* array_instr = instruction->GetArray();
3850 bool has_intermediate_address = array_instr->IsIntermediateAddress();
3851 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
3852 DCHECK(!(has_intermediate_address && kEmitCompilerReadBarrier));
3853
3854 switch (value_type) {
3855 case Primitive::kPrimBoolean:
3856 case Primitive::kPrimByte:
3857 case Primitive::kPrimShort:
3858 case Primitive::kPrimChar:
3859 case Primitive::kPrimInt: {
3860 if (index.IsConstant()) {
3861 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
3862 uint32_t full_offset =
3863 data_offset + (const_index << Primitive::ComponentSizeShift(value_type));
3864 StoreOperandType store_type = GetStoreOperandType(value_type);
3865 GetAssembler()->StoreToOffset(store_type, RegisterFrom(value_loc), array, full_offset);
3866 } else {
3867 vixl32::Register temp = temps.Acquire();
3868
3869 if (has_intermediate_address) {
3870 TODO_VIXL32(FATAL);
3871 } else {
3872 __ Add(temp, array, data_offset);
3873 }
3874 codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
3875 }
3876 break;
3877 }
3878
3879 case Primitive::kPrimNot: {
3880 vixl32::Register value = RegisterFrom(value_loc);
3881 // TryExtractArrayAccessAddress optimization is never applied for non-primitive ArraySet.
3882 // See the comment in instruction_simplifier_shared.cc.
3883 DCHECK(!has_intermediate_address);
3884
3885 if (instruction->InputAt(2)->IsNullConstant()) {
3886 // Just setting null.
3887 if (index.IsConstant()) {
3888 size_t offset =
3889 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3890 GetAssembler()->StoreToOffset(kStoreWord, value, array, offset);
3891 } else {
3892 DCHECK(index.IsRegister()) << index;
3893 vixl32::Register temp = temps.Acquire();
3894 __ Add(temp, array, data_offset);
3895 codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
3896 }
3897 codegen_->MaybeRecordImplicitNullCheck(instruction);
3898 DCHECK(!needs_write_barrier);
3899 DCHECK(!may_need_runtime_call_for_type_check);
3900 break;
3901 }
3902
3903 DCHECK(needs_write_barrier);
3904 Location temp1_loc = locations->GetTemp(0);
3905 vixl32::Register temp1 = RegisterFrom(temp1_loc);
3906 Location temp2_loc = locations->GetTemp(1);
3907 vixl32::Register temp2 = RegisterFrom(temp2_loc);
3908 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3909 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3910 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3911 vixl32::Label done;
3912 SlowPathCodeARMVIXL* slow_path = nullptr;
3913
3914 if (may_need_runtime_call_for_type_check) {
3915 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARMVIXL(instruction);
3916 codegen_->AddSlowPath(slow_path);
3917 if (instruction->GetValueCanBeNull()) {
3918 vixl32::Label non_zero;
3919 __ Cbnz(value, &non_zero);
3920 if (index.IsConstant()) {
3921 size_t offset =
3922 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3923 GetAssembler()->StoreToOffset(kStoreWord, value, array, offset);
3924 } else {
3925 DCHECK(index.IsRegister()) << index;
3926 vixl32::Register temp = temps.Acquire();
3927 __ Add(temp, array, data_offset);
3928 codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
3929 }
3930 codegen_->MaybeRecordImplicitNullCheck(instruction);
3931 __ B(&done);
3932 __ Bind(&non_zero);
3933 }
3934
3935 // Note that when read barriers are enabled, the type checks
3936 // are performed without read barriers. This is fine, even in
3937 // the case where a class object is in the from-space after
3938 // the flip, as a comparison involving such a type would not
3939 // produce a false positive; it may of course produce a false
3940 // negative, in which case we would take the ArraySet slow
3941 // path.
3942
3943 // /* HeapReference<Class> */ temp1 = array->klass_
3944 GetAssembler()->LoadFromOffset(kLoadWord, temp1, array, class_offset);
3945 codegen_->MaybeRecordImplicitNullCheck(instruction);
3946 GetAssembler()->MaybeUnpoisonHeapReference(temp1);
3947
3948 // /* HeapReference<Class> */ temp1 = temp1->component_type_
3949 GetAssembler()->LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
3950 // /* HeapReference<Class> */ temp2 = value->klass_
3951 GetAssembler()->LoadFromOffset(kLoadWord, temp2, value, class_offset);
3952 // If heap poisoning is enabled, no need to unpoison `temp1`
3953 // nor `temp2`, as we are comparing two poisoned references.
3954 __ Cmp(temp1, temp2);
3955
3956 if (instruction->StaticTypeOfArrayIsObjectArray()) {
3957 vixl32::Label do_put;
3958 __ B(eq, &do_put);
3959 // If heap poisoning is enabled, the `temp1` reference has
3960 // not been unpoisoned yet; unpoison it now.
3961 GetAssembler()->MaybeUnpoisonHeapReference(temp1);
3962
3963 // /* HeapReference<Class> */ temp1 = temp1->super_class_
3964 GetAssembler()->LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
3965 // If heap poisoning is enabled, no need to unpoison
3966 // `temp1`, as we are comparing against null below.
3967 __ Cbnz(temp1, slow_path->GetEntryLabel());
3968 __ Bind(&do_put);
3969 } else {
3970 __ B(ne, slow_path->GetEntryLabel());
3971 }
3972 }
3973
3974 vixl32::Register source = value;
3975 if (kPoisonHeapReferences) {
3976 // Note that in the case where `value` is a null reference,
3977 // we do not enter this block, as a null reference does not
3978 // need poisoning.
3979 DCHECK_EQ(value_type, Primitive::kPrimNot);
3980 __ Mov(temp1, value);
3981 GetAssembler()->PoisonHeapReference(temp1);
3982 source = temp1;
3983 }
3984
3985 if (index.IsConstant()) {
3986 size_t offset =
3987 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3988 GetAssembler()->StoreToOffset(kStoreWord, source, array, offset);
3989 } else {
3990 DCHECK(index.IsRegister()) << index;
3991
3992 vixl32::Register temp = temps.Acquire();
3993 __ Add(temp, array, data_offset);
3994 codegen_->StoreToShiftedRegOffset(value_type,
3995 LocationFrom(source),
3996 temp,
3997 RegisterFrom(index));
3998 }
3999
4000 if (!may_need_runtime_call_for_type_check) {
4001 codegen_->MaybeRecordImplicitNullCheck(instruction);
4002 }
4003
4004 codegen_->MarkGCCard(temp1, temp2, array, value, instruction->GetValueCanBeNull());
4005
4006 if (done.IsReferenced()) {
4007 __ Bind(&done);
4008 }
4009
4010 if (slow_path != nullptr) {
4011 __ Bind(slow_path->GetExitLabel());
4012 }
4013
4014 break;
4015 }
4016
4017 case Primitive::kPrimLong: {
4018 Location value = locations->InAt(2);
4019 if (index.IsConstant()) {
4020 size_t offset =
4021 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
4022 GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), array, offset);
4023 } else {
4024 vixl32::Register temp = temps.Acquire();
4025 __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
4026 GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), temp, data_offset);
4027 }
4028 break;
4029 }
4030
4031 case Primitive::kPrimFloat: {
4032 Location value = locations->InAt(2);
4033 DCHECK(value.IsFpuRegister());
4034 if (index.IsConstant()) {
4035 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4036 GetAssembler()->StoreSToOffset(SRegisterFrom(value), array, offset);
4037 } else {
4038 vixl32::Register temp = temps.Acquire();
4039 __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
4040 GetAssembler()->StoreSToOffset(SRegisterFrom(value), temp, data_offset);
4041 }
4042 break;
4043 }
4044
4045 case Primitive::kPrimDouble: {
4046 Location value = locations->InAt(2);
4047 DCHECK(value.IsFpuRegisterPair());
4048 if (index.IsConstant()) {
4049 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
4050 GetAssembler()->StoreDToOffset(DRegisterFrom(value), array, offset);
4051 } else {
4052 vixl32::Register temp = temps.Acquire();
4053 __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
4054 GetAssembler()->StoreDToOffset(DRegisterFrom(value), temp, data_offset);
4055 }
4056 break;
4057 }
4058
4059 case Primitive::kPrimVoid:
4060 LOG(FATAL) << "Unreachable type " << value_type;
4061 UNREACHABLE();
4062 }
4063
4064 // Objects are handled in the switch.
4065 if (value_type != Primitive::kPrimNot) {
4066 codegen_->MaybeRecordImplicitNullCheck(instruction);
4067 }
4068}
4069
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004070void LocationsBuilderARMVIXL::VisitArrayLength(HArrayLength* instruction) {
4071 LocationSummary* locations =
4072 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4073 locations->SetInAt(0, Location::RequiresRegister());
4074 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4075}
4076
4077void InstructionCodeGeneratorARMVIXL::VisitArrayLength(HArrayLength* instruction) {
4078 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
4079 vixl32::Register obj = InputRegisterAt(instruction, 0);
4080 vixl32::Register out = OutputRegister(instruction);
4081 GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
4082 codegen_->MaybeRecordImplicitNullCheck(instruction);
4083 // TODO(VIXL): https://android-review.googlesource.com/#/c/272625/
4084}
4085
Scott Wakelingc34dba72016-10-03 10:14:44 +01004086void LocationsBuilderARMVIXL::VisitBoundsCheck(HBoundsCheck* instruction) {
4087 RegisterSet caller_saves = RegisterSet::Empty();
4088 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4089 caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
4090 caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(1)));
4091 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
4092 locations->SetInAt(0, Location::RequiresRegister());
4093 locations->SetInAt(1, Location::RequiresRegister());
4094}
4095
4096void InstructionCodeGeneratorARMVIXL::VisitBoundsCheck(HBoundsCheck* instruction) {
4097 SlowPathCodeARMVIXL* slow_path =
4098 new (GetGraph()->GetArena()) BoundsCheckSlowPathARMVIXL(instruction);
4099 codegen_->AddSlowPath(slow_path);
4100
4101 vixl32::Register index = InputRegisterAt(instruction, 0);
4102 vixl32::Register length = InputRegisterAt(instruction, 1);
4103
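// A single unsigned comparison catches both failure cases: a negative index becomes a
// large unsigned value, so `index hs length` covers index < 0 as well as index >= length.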
4104 __ Cmp(index, length);
4105 __ B(hs, slow_path->GetEntryLabel());
4106}
4107
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004108void CodeGeneratorARMVIXL::MarkGCCard(vixl32::Register temp,
4109 vixl32::Register card,
4110 vixl32::Register object,
4111 vixl32::Register value,
4112 bool can_be_null) {
4113 vixl32::Label is_null;
4114 if (can_be_null) {
4115 __ Cbz(value, &is_null);
4116 }
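// Compute the card for `object` as card_table_base + (object >> kCardShift) and dirty it
// by storing a byte there; ART biases the card table so that the low byte of its base
// address is the dirty-card value, which is why `card` itself is the value stored.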
4117 GetAssembler()->LoadFromOffset(
4118 kLoadWord, card, tr, Thread::CardTableOffset<kArmPointerSize>().Int32Value());
4119 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
4120 __ Strb(card, MemOperand(card, temp));
4121 if (can_be_null) {
4122 __ Bind(&is_null);
4123 }
4124}
4125
Scott Wakelingfe885462016-09-22 10:24:38 +01004126void LocationsBuilderARMVIXL::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4127 LOG(FATAL) << "Unreachable";
4128}
4129
4130void InstructionCodeGeneratorARMVIXL::VisitParallelMove(HParallelMove* instruction) {
4131 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4132}
4133
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004134void LocationsBuilderARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
4135 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4136 // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/ and related.
4137}
4138
4139void InstructionCodeGeneratorARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
4140 HBasicBlock* block = instruction->GetBlock();
4141 if (block->GetLoopInformation() != nullptr) {
4142 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4143 // The back edge will generate the suspend check.
4144 return;
4145 }
4146 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4147 // The goto will generate the suspend check.
4148 return;
4149 }
4150 GenerateSuspendCheck(instruction, nullptr);
4151}
4152
4153void InstructionCodeGeneratorARMVIXL::GenerateSuspendCheck(HSuspendCheck* instruction,
4154 HBasicBlock* successor) {
4155 SuspendCheckSlowPathARMVIXL* slow_path =
4156 down_cast<SuspendCheckSlowPathARMVIXL*>(instruction->GetSlowPath());
4157 if (slow_path == nullptr) {
4158 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARMVIXL(instruction, successor);
4159 instruction->SetSlowPath(slow_path);
4160 codegen_->AddSlowPath(slow_path);
4161 if (successor != nullptr) {
4162 DCHECK(successor->IsLoopHeader());
4163 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
4164 }
4165 } else {
4166 DCHECK_EQ(slow_path->GetSuccessor(), successor);
4167 }
4168
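// Load the 16-bit thread flags; any non-zero value means a suspend or checkpoint request
// is pending, so branch to the suspend slow path (or to the successor when none is
// pending).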
4169 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4170 vixl32::Register temp = temps.Acquire();
4171 GetAssembler()->LoadFromOffset(
4172 kLoadUnsignedHalfword, temp, tr, Thread::ThreadFlagsOffset<kArmPointerSize>().Int32Value());
4173 if (successor == nullptr) {
4174 __ Cbnz(temp, slow_path->GetEntryLabel());
4175 __ Bind(slow_path->GetReturnLabel());
4176 } else {
4177 __ Cbz(temp, codegen_->GetLabelOf(successor));
4178 __ B(slow_path->GetEntryLabel());
4179 }
4180}
4181
Scott Wakelingfe885462016-09-22 10:24:38 +01004182ArmVIXLAssembler* ParallelMoveResolverARMVIXL::GetAssembler() const {
4183 return codegen_->GetAssembler();
4184}
4185
4186void ParallelMoveResolverARMVIXL::EmitMove(size_t index) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004187 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
Scott Wakelingfe885462016-09-22 10:24:38 +01004188 MoveOperands* move = moves_[index];
4189 Location source = move->GetSource();
4190 Location destination = move->GetDestination();
4191
4192 if (source.IsRegister()) {
4193 if (destination.IsRegister()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004194 __ Mov(RegisterFrom(destination), RegisterFrom(source));
Scott Wakelingfe885462016-09-22 10:24:38 +01004195 } else if (destination.IsFpuRegister()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004196 __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
Scott Wakelingfe885462016-09-22 10:24:38 +01004197 } else {
4198 DCHECK(destination.IsStackSlot());
4199 GetAssembler()->StoreToOffset(kStoreWord,
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004200 RegisterFrom(source),
Scott Wakelingfe885462016-09-22 10:24:38 +01004201 sp,
4202 destination.GetStackIndex());
4203 }
4204 } else if (source.IsStackSlot()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004205 if (destination.IsRegister()) {
4206 GetAssembler()->LoadFromOffset(kLoadWord,
4207 RegisterFrom(destination),
4208 sp,
4209 source.GetStackIndex());
4210 } else if (destination.IsFpuRegister()) {
4211 GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
4212 } else {
4213 DCHECK(destination.IsStackSlot());
4214 vixl32::Register temp = temps.Acquire();
4215 GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
4216 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
4217 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004218 } else if (source.IsFpuRegister()) {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004219 if (destination.IsRegister()) {
Scott Wakelingc34dba72016-10-03 10:14:44 +01004220 __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004221 } else if (destination.IsFpuRegister()) {
4222 __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
4223 } else {
4224 DCHECK(destination.IsStackSlot());
4225 GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
4226 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004227 } else if (source.IsDoubleStackSlot()) {
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004228 if (destination.IsDoubleStackSlot()) {
4229 vixl32::DRegister temp = temps.AcquireD();
4230 GetAssembler()->LoadDFromOffset(temp, sp, source.GetStackIndex());
4231 GetAssembler()->StoreDToOffset(temp, sp, destination.GetStackIndex());
4232 } else if (destination.IsRegisterPair()) {
4233 DCHECK(ExpectedPairLayout(destination));
4234 GetAssembler()->LoadFromOffset(
4235 kLoadWordPair, LowRegisterFrom(destination), sp, source.GetStackIndex());
4236 } else {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004237 DCHECK(destination.IsFpuRegisterPair()) << destination;
4238 GetAssembler()->LoadDFromOffset(DRegisterFrom(destination), sp, source.GetStackIndex());
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004239 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004240 } else if (source.IsRegisterPair()) {
4241 if (destination.IsRegisterPair()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004242 __ Mov(LowRegisterFrom(destination), LowRegisterFrom(source));
4243 __ Mov(HighRegisterFrom(destination), HighRegisterFrom(source));
Scott Wakelingfe885462016-09-22 10:24:38 +01004244 } else if (destination.IsFpuRegisterPair()) {
Scott Wakelingc34dba72016-10-03 10:14:44 +01004245 __ Vmov(DRegisterFrom(destination), LowRegisterFrom(source), HighRegisterFrom(source));
Scott Wakelingfe885462016-09-22 10:24:38 +01004246 } else {
4247 DCHECK(destination.IsDoubleStackSlot()) << destination;
4248 DCHECK(ExpectedPairLayout(source));
4249 GetAssembler()->StoreToOffset(kStoreWordPair,
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004250 LowRegisterFrom(source),
Scott Wakelingfe885462016-09-22 10:24:38 +01004251 sp,
4252 destination.GetStackIndex());
4253 }
4254 } else if (source.IsFpuRegisterPair()) {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004255 if (destination.IsRegisterPair()) {
Scott Wakelingc34dba72016-10-03 10:14:44 +01004256 __ Vmov(LowRegisterFrom(destination), HighRegisterFrom(destination), DRegisterFrom(source));
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004257 } else if (destination.IsFpuRegisterPair()) {
4258 __ Vmov(DRegisterFrom(destination), DRegisterFrom(source));
4259 } else {
4260 DCHECK(destination.IsDoubleStackSlot()) << destination;
4261 GetAssembler()->StoreDToOffset(DRegisterFrom(source), sp, destination.GetStackIndex());
4262 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004263 } else {
4264 DCHECK(source.IsConstant()) << source;
4265 HConstant* constant = source.GetConstant();
4266 if (constant->IsIntConstant() || constant->IsNullConstant()) {
4267 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
4268 if (destination.IsRegister()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004269 __ Mov(RegisterFrom(destination), value);
Scott Wakelingfe885462016-09-22 10:24:38 +01004270 } else {
4271 DCHECK(destination.IsStackSlot());
Scott Wakelingfe885462016-09-22 10:24:38 +01004272 vixl32::Register temp = temps.Acquire();
4273 __ Mov(temp, value);
4274 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
4275 }
4276 } else if (constant->IsLongConstant()) {
4277 int64_t value = constant->AsLongConstant()->GetValue();
4278 if (destination.IsRegisterPair()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004279 __ Mov(LowRegisterFrom(destination), Low32Bits(value));
4280 __ Mov(HighRegisterFrom(destination), High32Bits(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01004281 } else {
4282 DCHECK(destination.IsDoubleStackSlot()) << destination;
Scott Wakelingfe885462016-09-22 10:24:38 +01004283 vixl32::Register temp = temps.Acquire();
4284 __ Mov(temp, Low32Bits(value));
4285 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
4286 __ Mov(temp, High32Bits(value));
4287 GetAssembler()->StoreToOffset(kStoreWord,
4288 temp,
4289 sp,
4290 destination.GetHighStackIndex(kArmWordSize));
4291 }
4292 } else if (constant->IsDoubleConstant()) {
4293 double value = constant->AsDoubleConstant()->GetValue();
4294 if (destination.IsFpuRegisterPair()) {
Scott Wakelingc34dba72016-10-03 10:14:44 +01004295 __ Vmov(DRegisterFrom(destination), value);
Scott Wakelingfe885462016-09-22 10:24:38 +01004296 } else {
4297 DCHECK(destination.IsDoubleStackSlot()) << destination;
4298 uint64_t int_value = bit_cast<uint64_t, double>(value);
Scott Wakelingfe885462016-09-22 10:24:38 +01004299 vixl32::Register temp = temps.Acquire();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004300 __ Mov(temp, Low32Bits(int_value));
Scott Wakelingfe885462016-09-22 10:24:38 +01004301 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004302 __ Mov(temp, High32Bits(int_value));
Scott Wakelingfe885462016-09-22 10:24:38 +01004303 GetAssembler()->StoreToOffset(kStoreWord,
4304 temp,
4305 sp,
4306 destination.GetHighStackIndex(kArmWordSize));
4307 }
4308 } else {
4309 DCHECK(constant->IsFloatConstant()) << constant->DebugName();
4310 float value = constant->AsFloatConstant()->GetValue();
4311 if (destination.IsFpuRegister()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004312 __ Vmov(SRegisterFrom(destination), value);
Scott Wakelingfe885462016-09-22 10:24:38 +01004313 } else {
4314 DCHECK(destination.IsStackSlot());
Scott Wakelingfe885462016-09-22 10:24:38 +01004315 vixl32::Register temp = temps.Acquire();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004316 __ Mov(temp, bit_cast<int32_t, float>(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01004317 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
4318 }
4319 }
4320 }
4321}
4322
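// Swaps the contents of core register `reg` with the word-sized stack slot at SP offset `mem`,
// using a scratch register to hold the original register value.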
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004323void ParallelMoveResolverARMVIXL::Exchange(vixl32::Register reg, int mem) {
4324 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4325 vixl32::Register temp = temps.Acquire();
4326 __ Mov(temp, reg);
4327 GetAssembler()->LoadFromOffset(kLoadWord, reg, sp, mem);
4328 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, mem);
Scott Wakelingfe885462016-09-22 10:24:38 +01004329}
4330
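// Swaps two word-sized stack slots, using one core and one S scratch register so that both
// values can be held at the same time.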
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004331void ParallelMoveResolverARMVIXL::Exchange(int mem1, int mem2) {
4332 // TODO(VIXL32): Double check the performance of this implementation.
4333 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4334 vixl32::Register temp = temps.Acquire();
4335 vixl32::SRegister temp_s = temps.AcquireS();
4336
4337 __ Ldr(temp, MemOperand(sp, mem1));
4338 __ Vldr(temp_s, MemOperand(sp, mem2));
4339 __ Str(temp, MemOperand(sp, mem2));
4340 __ Vstr(temp_s, MemOperand(sp, mem1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004341}
4342
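// Emits the swap at `index`. Combinations that are not needed yet are left unimplemented and
// abort via TODO_VIXL32(FATAL).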
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004343void ParallelMoveResolverARMVIXL::EmitSwap(size_t index) {
4344 MoveOperands* move = moves_[index];
4345 Location source = move->GetSource();
4346 Location destination = move->GetDestination();
4347 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4348
4349 if (source.IsRegister() && destination.IsRegister()) {
4350 vixl32::Register temp = temps.Acquire();
4351 DCHECK(!RegisterFrom(source).Is(temp));
4352 DCHECK(!RegisterFrom(destination).Is(temp));
4353 __ Mov(temp, RegisterFrom(destination));
4354 __ Mov(RegisterFrom(destination), RegisterFrom(source));
4355 __ Mov(RegisterFrom(source), temp);
4356 } else if (source.IsRegister() && destination.IsStackSlot()) {
4357 Exchange(RegisterFrom(source), destination.GetStackIndex());
4358 } else if (source.IsStackSlot() && destination.IsRegister()) {
4359 Exchange(RegisterFrom(destination), source.GetStackIndex());
4360 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
4361 TODO_VIXL32(FATAL);
4362 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
4363 TODO_VIXL32(FATAL);
4364 } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
4365 vixl32::DRegister temp = temps.AcquireD();
4366 __ Vmov(temp, LowRegisterFrom(source), HighRegisterFrom(source));
4367 __ Mov(LowRegisterFrom(source), LowRegisterFrom(destination));
4368 __ Mov(HighRegisterFrom(source), HighRegisterFrom(destination));
4369 __ Vmov(LowRegisterFrom(destination), HighRegisterFrom(destination), temp);
4370 } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
4371 vixl32::Register low_reg = LowRegisterFrom(source.IsRegisterPair() ? source : destination);
4372 int mem = source.IsRegisterPair() ? destination.GetStackIndex() : source.GetStackIndex();
4373 DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
4374 vixl32::DRegister temp = temps.AcquireD();
4375 __ Vmov(temp, low_reg, vixl32::Register(low_reg.GetCode() + 1));
4376 GetAssembler()->LoadFromOffset(kLoadWordPair, low_reg, sp, mem);
4377 GetAssembler()->StoreDToOffset(temp, sp, mem);
4378 } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
4379 TODO_VIXL32(FATAL);
4380 } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
4381 TODO_VIXL32(FATAL);
4382 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
4383 TODO_VIXL32(FATAL);
4384 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
4385 vixl32::DRegister temp1 = temps.AcquireD();
4386 vixl32::DRegister temp2 = temps.AcquireD();
4387 __ Vldr(temp1, MemOperand(sp, source.GetStackIndex()));
4388 __ Vldr(temp2, MemOperand(sp, destination.GetStackIndex()));
4389 __ Vstr(temp1, MemOperand(sp, destination.GetStackIndex()));
4390 __ Vstr(temp2, MemOperand(sp, source.GetStackIndex()));
4391 } else {
4392 LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
4393 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004394}
4395
4396void ParallelMoveResolverARMVIXL::SpillScratch(int reg ATTRIBUTE_UNUSED) {
4397 TODO_VIXL32(FATAL);
4398}
4399
4400void ParallelMoveResolverARMVIXL::RestoreScratch(int reg ATTRIBUTE_UNUSED) {
4401 TODO_VIXL32(FATAL);
4402}
4403
Artem Serov02d37832016-10-25 15:25:33 +01004404// Check if the desired_class_load_kind is supported. If it is, return it,
4405// otherwise return a fall-back kind that should be used instead.
4406HLoadClass::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadClassKind(
4407 HLoadClass::LoadKind desired_class_load_kind ATTRIBUTE_UNUSED) {
4408 // TODO(VIXL): Implement optimized code paths.
4409 return HLoadClass::LoadKind::kDexCacheViaMethod;
4410}
4411
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004412void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
4413 if (cls->NeedsAccessCheck()) {
4414 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4415 CodeGenerator::CreateLoadClassLocationSummary(
4416 cls,
4417 LocationFrom(calling_convention.GetRegisterAt(0)),
4418 LocationFrom(r0),
4419 /* code_generator_supports_read_barrier */ true);
4420 return;
4421 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004422
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004423 // TODO(VIXL): read barrier code.
4424 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
4425 ? LocationSummary::kCallOnSlowPath
4426 : LocationSummary::kNoCall;
4427 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
4428 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4429 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
4430 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod ||
4431 load_kind == HLoadClass::LoadKind::kDexCachePcRelative) {
4432 locations->SetInAt(0, Location::RequiresRegister());
4433 }
4434 locations->SetOut(Location::RequiresRegister());
4435}
4436
4437void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
4438 LocationSummary* locations = cls->GetLocations();
4439 if (cls->NeedsAccessCheck()) {
4440 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
4441 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
4442 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
4443 return;
4444 }
4445
4446 Location out_loc = locations->Out();
4447 vixl32::Register out = OutputRegister(cls);
4448
4449 // TODO(VIXL): read barrier code.
4450 bool generate_null_check = false;
4451 switch (cls->GetLoadKind()) {
4452 case HLoadClass::LoadKind::kReferrersClass: {
4453 DCHECK(!cls->CanCallRuntime());
4454 DCHECK(!cls->MustGenerateClinitCheck());
4455 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4456 vixl32::Register current_method = InputRegisterAt(cls, 0);
4457 GenerateGcRootFieldLoad(cls,
4458 out_loc,
4459 current_method,
Roland Levillain00468f32016-10-27 18:02:48 +01004460 ArtMethod::DeclaringClassOffset().Int32Value(),
4461 kEmitCompilerReadBarrier);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004462 break;
4463 }
4464 case HLoadClass::LoadKind::kDexCacheViaMethod: {
4465 // /* GcRoot<mirror::Class>[] */ out =
4466 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
4467 vixl32::Register current_method = InputRegisterAt(cls, 0);
4468 const int32_t resolved_types_offset =
4469 ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value();
4470 GetAssembler()->LoadFromOffset(kLoadWord, out, current_method, resolved_types_offset);
4471 // /* GcRoot<mirror::Class> */ out = out[type_index]
4472 size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex());
Roland Levillain00468f32016-10-27 18:02:48 +01004473 GenerateGcRootFieldLoad(cls, out_loc, out, offset, kEmitCompilerReadBarrier);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004474 generate_null_check = !cls->IsInDexCache();
4475 break;
4476 }
4477 default:
4478 TODO_VIXL32(FATAL);
4479 }
4480
4481 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4482 DCHECK(cls->CanCallRuntime());
4483 LoadClassSlowPathARMVIXL* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARMVIXL(
4484 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4485 codegen_->AddSlowPath(slow_path);
4486 if (generate_null_check) {
4487 __ Cbz(out, slow_path->GetEntryLabel());
4488 }
4489 if (cls->MustGenerateClinitCheck()) {
4490 GenerateClassInitializationCheck(slow_path, out);
4491 } else {
4492 __ Bind(slow_path->GetExitLabel());
4493 }
4494 }
4495}
4496
Artem Serov02d37832016-10-25 15:25:33 +01004497void LocationsBuilderARMVIXL::VisitClinitCheck(HClinitCheck* check) {
4498 LocationSummary* locations =
4499 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
4500 locations->SetInAt(0, Location::RequiresRegister());
4501 if (check->HasUses()) {
4502 locations->SetOut(Location::SameAsFirstInput());
4503 }
4504}
4505
4506void InstructionCodeGeneratorARMVIXL::VisitClinitCheck(HClinitCheck* check) {
4507 // We assume the class is not null.
4508 LoadClassSlowPathARMVIXL* slow_path =
4509 new (GetGraph()->GetArena()) LoadClassSlowPathARMVIXL(check->GetLoadClass(),
4510 check,
4511 check->GetDexPc(),
4512 /* do_clinit */ true);
4513 codegen_->AddSlowPath(slow_path);
4514 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
4515}
4516
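// Branches to `slow_path` if the class held in `class_reg` is not yet initialized.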
4517void InstructionCodeGeneratorARMVIXL::GenerateClassInitializationCheck(
4518 LoadClassSlowPathARMVIXL* slow_path, vixl32::Register class_reg) {
4519 UseScratchRegisterScope temps(GetVIXLAssembler());
4520 vixl32::Register temp = temps.Acquire();
4521 GetAssembler()->LoadFromOffset(kLoadWord,
4522 temp,
4523 class_reg,
4524 mirror::Class::StatusOffset().Int32Value());
4525 __ Cmp(temp, mirror::Class::kStatusInitialized);
4526 __ B(lt, slow_path->GetEntryLabel());
4527 // Even if the initialized flag is set, we may be in a situation where caches are not synced
4528 // properly. Therefore, we do a memory fence.
4529 __ Dmb(ISH);
4530 __ Bind(slow_path->GetExitLabel());
4531}
4532
4533// Check if the desired_string_load_kind is supported. If it is, return it,
4534// otherwise return a fall-back kind that should be used instead.
4535HLoadString::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadStringKind(
4536 HLoadString::LoadKind desired_string_load_kind ATTRIBUTE_UNUSED) {
4537 // TODO(VIXL): Implement optimized code paths. For now we always use the simpler fallback code.
4538 return HLoadString::LoadKind::kDexCacheViaMethod;
4539}
4540
4541void LocationsBuilderARMVIXL::VisitLoadString(HLoadString* load) {
4542 LocationSummary::CallKind call_kind = load->NeedsEnvironment()
4543 ? LocationSummary::kCallOnMainOnly
4544 : LocationSummary::kNoCall;
4545 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
4546
4547 // TODO(VIXL): Implement optimized code paths.
4548 // See InstructionCodeGeneratorARMVIXL::VisitLoadString.
4549 HLoadString::LoadKind load_kind = load->GetLoadKind();
4550 if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
4551 locations->SetInAt(0, Location::RequiresRegister());
4552 // TODO(VIXL): Use InvokeRuntimeCallingConventionARMVIXL instead.
4553 locations->SetOut(LocationFrom(r0));
4554 } else {
4555 locations->SetOut(Location::RequiresRegister());
4556 }
4557}
4558
4559void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) {
4560 // TODO(VIXL): Implement optimized code paths.
4561 // We implemented the simplest solution to get the first ART tests passing; the optimized
4562 // path is deferred until later and should be implemented using the ARM64 implementation as
4563 // a reference. The same applies to LocationsBuilderARMVIXL::VisitLoadString.
4564
4565 // TODO: Re-add the compiler code to do string dex cache lookup again.
4566 DCHECK_EQ(load->GetLoadKind(), HLoadString::LoadKind::kDexCacheViaMethod);
4567 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4568 __ Mov(calling_convention.GetRegisterAt(0), load->GetStringIndex());
4569 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
4570 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
4571}
4572
4573static int32_t GetExceptionTlsOffset() {
4574 return Thread::ExceptionOffset<kArmPointerSize>().Int32Value();
4575}
4576
4577void LocationsBuilderARMVIXL::VisitLoadException(HLoadException* load) {
4578 LocationSummary* locations =
4579 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4580 locations->SetOut(Location::RequiresRegister());
4581}
4582
4583void InstructionCodeGeneratorARMVIXL::VisitLoadException(HLoadException* load) {
4584 vixl32::Register out = OutputRegister(load);
4585 GetAssembler()->LoadFromOffset(kLoadWord, out, tr, GetExceptionTlsOffset());
4586}
4587
4588
4589void LocationsBuilderARMVIXL::VisitClearException(HClearException* clear) {
4590 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4591}
4592
4593void InstructionCodeGeneratorARMVIXL::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4594 UseScratchRegisterScope temps(GetVIXLAssembler());
4595 vixl32::Register temp = temps.Acquire();
4596 __ Mov(temp, 0);
4597 GetAssembler()->StoreToOffset(kStoreWord, temp, tr, GetExceptionTlsOffset());
4598}
4599
4600void LocationsBuilderARMVIXL::VisitThrow(HThrow* instruction) {
4601 LocationSummary* locations =
4602 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
4603 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4604 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4605}
4606
4607void InstructionCodeGeneratorARMVIXL::VisitThrow(HThrow* instruction) {
4608 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
4609 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
4610}
4611
Artem Serov02109dd2016-09-23 17:17:54 +01004612void LocationsBuilderARMVIXL::VisitAnd(HAnd* instruction) {
4613 HandleBitwiseOperation(instruction, AND);
4614}
4615
4616void LocationsBuilderARMVIXL::VisitOr(HOr* instruction) {
4617 HandleBitwiseOperation(instruction, ORR);
4618}
4619
4620void LocationsBuilderARMVIXL::VisitXor(HXor* instruction) {
4621 HandleBitwiseOperation(instruction, EOR);
4622}
4623
4624void LocationsBuilderARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction, Opcode opcode) {
4625 LocationSummary* locations =
4626 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4627 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
4628 || instruction->GetResultType() == Primitive::kPrimLong);
4629 // Note: GVN reorders commutative operations to have the constant on the right hand side.
4630 locations->SetInAt(0, Location::RequiresRegister());
4631 locations->SetInAt(1, ArmEncodableConstantOrRegister(instruction->InputAt(1), opcode));
4632 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4633}
4634
4635void InstructionCodeGeneratorARMVIXL::VisitAnd(HAnd* instruction) {
4636 HandleBitwiseOperation(instruction);
4637}
4638
4639void InstructionCodeGeneratorARMVIXL::VisitOr(HOr* instruction) {
4640 HandleBitwiseOperation(instruction);
4641}
4642
4643void InstructionCodeGeneratorARMVIXL::VisitXor(HXor* instruction) {
4644 HandleBitwiseOperation(instruction);
4645}
4646
4647// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
4648void InstructionCodeGeneratorARMVIXL::GenerateAndConst(vixl32::Register out,
4649 vixl32::Register first,
4650 uint32_t value) {
4651 // Optimize special cases for individual halves of `and-long` (`and` is simplified earlier).
4652 if (value == 0xffffffffu) {
4653 if (!out.Is(first)) {
4654 __ Mov(out, first);
4655 }
4656 return;
4657 }
4658 if (value == 0u) {
4659 __ Mov(out, 0);
4660 return;
4661 }
4662 if (GetAssembler()->ShifterOperandCanHold(AND, value)) {
4663 __ And(out, first, value);
4664 } else {
4665 DCHECK(GetAssembler()->ShifterOperandCanHold(BIC, ~value));
4666 __ Bic(out, first, ~value);
4667 }
4668}
4669
4670// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
4671void InstructionCodeGeneratorARMVIXL::GenerateOrrConst(vixl32::Register out,
4672 vixl32::Register first,
4673 uint32_t value) {
4674 // Optimize special cases for individual halves of `or-long` (`or` is simplified earlier).
4675 if (value == 0u) {
4676 if (!out.Is(first)) {
4677 __ Mov(out, first);
4678 }
4679 return;
4680 }
4681 if (value == 0xffffffffu) {
4682 __ Mvn(out, 0);
4683 return;
4684 }
4685 if (GetAssembler()->ShifterOperandCanHold(ORR, value)) {
4686 __ Orr(out, first, value);
4687 } else {
4688 DCHECK(GetAssembler()->ShifterOperandCanHold(ORN, ~value));
4689 __ Orn(out, first, ~value);
4690 }
4691}
4692
4693// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
4694void InstructionCodeGeneratorARMVIXL::GenerateEorConst(vixl32::Register out,
4695 vixl32::Register first,
4696 uint32_t value) {
4697 // Optimize the special case for individual halves of `xor-long` (`xor` is simplified earlier).
4698 if (value == 0u) {
4699 if (!out.Is(first)) {
4700 __ Mov(out, first);
4701 }
4702 return;
4703 }
4704 __ Eor(out, first, value);
4705}
4706
4707void InstructionCodeGeneratorARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction) {
4708 LocationSummary* locations = instruction->GetLocations();
4709 Location first = locations->InAt(0);
4710 Location second = locations->InAt(1);
4711 Location out = locations->Out();
4712
4713 if (second.IsConstant()) {
4714 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
4715 uint32_t value_low = Low32Bits(value);
4716 if (instruction->GetResultType() == Primitive::kPrimInt) {
4717 vixl32::Register first_reg = InputRegisterAt(instruction, 0);
4718 vixl32::Register out_reg = OutputRegister(instruction);
4719 if (instruction->IsAnd()) {
4720 GenerateAndConst(out_reg, first_reg, value_low);
4721 } else if (instruction->IsOr()) {
4722 GenerateOrrConst(out_reg, first_reg, value_low);
4723 } else {
4724 DCHECK(instruction->IsXor());
4725 GenerateEorConst(out_reg, first_reg, value_low);
4726 }
4727 } else {
4728 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
4729 uint32_t value_high = High32Bits(value);
4730 vixl32::Register first_low = LowRegisterFrom(first);
4731 vixl32::Register first_high = HighRegisterFrom(first);
4732 vixl32::Register out_low = LowRegisterFrom(out);
4733 vixl32::Register out_high = HighRegisterFrom(out);
4734 if (instruction->IsAnd()) {
4735 GenerateAndConst(out_low, first_low, value_low);
4736 GenerateAndConst(out_high, first_high, value_high);
4737 } else if (instruction->IsOr()) {
4738 GenerateOrrConst(out_low, first_low, value_low);
4739 GenerateOrrConst(out_high, first_high, value_high);
4740 } else {
4741 DCHECK(instruction->IsXor());
4742 GenerateEorConst(out_low, first_low, value_low);
4743 GenerateEorConst(out_high, first_high, value_high);
4744 }
4745 }
4746 return;
4747 }
4748
4749 if (instruction->GetResultType() == Primitive::kPrimInt) {
4750 vixl32::Register first_reg = InputRegisterAt(instruction, 0);
4751 vixl32::Register second_reg = InputRegisterAt(instruction, 1);
4752 vixl32::Register out_reg = OutputRegister(instruction);
4753 if (instruction->IsAnd()) {
4754 __ And(out_reg, first_reg, second_reg);
4755 } else if (instruction->IsOr()) {
4756 __ Orr(out_reg, first_reg, second_reg);
4757 } else {
4758 DCHECK(instruction->IsXor());
4759 __ Eor(out_reg, first_reg, second_reg);
4760 }
4761 } else {
4762 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
4763 vixl32::Register first_low = LowRegisterFrom(first);
4764 vixl32::Register first_high = HighRegisterFrom(first);
4765 vixl32::Register second_low = LowRegisterFrom(second);
4766 vixl32::Register second_high = HighRegisterFrom(second);
4767 vixl32::Register out_low = LowRegisterFrom(out);
4768 vixl32::Register out_high = HighRegisterFrom(out);
4769 if (instruction->IsAnd()) {
4770 __ And(out_low, first_low, second_low);
4771 __ And(out_high, first_high, second_high);
4772 } else if (instruction->IsOr()) {
4773 __ Orr(out_low, first_low, second_low);
4774 __ Orr(out_high, first_high, second_high);
4775 } else {
4776 DCHECK(instruction->IsXor());
4777 __ Eor(out_low, first_low, second_low);
4778 __ Eor(out_high, first_high, second_high);
4779 }
4780 }
4781}
4782
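// Loads the GC root stored at `obj + offset` into `root`. Only the configuration without read
// barriers is implemented so far.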
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004783void InstructionCodeGeneratorARMVIXL::GenerateGcRootFieldLoad(
4784 HInstruction* instruction ATTRIBUTE_UNUSED,
4785 Location root,
4786 vixl32::Register obj,
4787 uint32_t offset,
4788 bool requires_read_barrier) {
4789 vixl32::Register root_reg = RegisterFrom(root);
4790 if (requires_read_barrier) {
4791 TODO_VIXL32(FATAL);
4792 } else {
4793 // Plain GC root load with no read barrier.
4794 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
4795 GetAssembler()->LoadFromOffset(kLoadWord, root_reg, obj, offset);
4796 // Note that GC roots are not affected by heap poisoning, thus we
4797 // do not have to unpoison `root_reg` here.
4798 }
4799}
4800
Artem Serov02d37832016-10-25 15:25:33 +01004801void CodeGeneratorARMVIXL::MaybeGenerateReadBarrierSlow(HInstruction* instruction ATTRIBUTE_UNUSED,
4802 Location out,
4803 Location ref ATTRIBUTE_UNUSED,
4804 Location obj ATTRIBUTE_UNUSED,
4805 uint32_t offset ATTRIBUTE_UNUSED,
4806 Location index ATTRIBUTE_UNUSED) {
4807 if (kEmitCompilerReadBarrier) {
4808 DCHECK(!kUseBakerReadBarrier);
4809 TODO_VIXL32(FATAL);
4810 } else if (kPoisonHeapReferences) {
4811 GetAssembler()->UnpoisonHeapReference(RegisterFrom(out));
4812 }
4813}
4814
4815// Check if the desired_dispatch_info is supported. If it is, return it,
4816// otherwise return a fall-back info that should be used instead.
4817HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARMVIXL::GetSupportedInvokeStaticOrDirectDispatch(
4818 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info ATTRIBUTE_UNUSED,
4819 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
4820 // TODO(VIXL): Implement optimized code paths.
4821 return {
4822 HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod,
4823 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
4824 0u,
4825 0u
4826 };
4827}
4828
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004829vixl32::Register CodeGeneratorARMVIXL::GetInvokeStaticOrDirectExtraParameter(
4830 HInvokeStaticOrDirect* invoke, vixl32::Register temp) {
4831 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
4832 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
4833 if (!invoke->GetLocations()->Intrinsified()) {
4834 return RegisterFrom(location);
4835 }
4836 // For intrinsics we allow any location, so it may be on the stack.
4837 if (!location.IsRegister()) {
4838 GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, location.GetStackIndex());
4839 return temp;
4840 }
4841 // For register locations, check if the register was saved. If so, get it from the stack.
4842 // Note: There is a chance that the register was saved but not overwritten, so we could
4843 // save one load. However, since this is just an intrinsic slow path, we prefer this
4844 // simple and more robust approach rather than trying to determine if that's the case.
4845 SlowPathCode* slow_path = GetCurrentSlowPath();
4846 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
4847 if (slow_path->IsCoreRegisterSaved(RegisterFrom(location).GetCode())) {
4848 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(RegisterFrom(location).GetCode());
4849 GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, stack_offset);
4850 return temp;
4851 }
4852 return RegisterFrom(location);
4853}
4854
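// Emits the call sequence for a static or direct invoke. Only the kStringInit and
// kDexCacheViaMethod method load kinds and the kCallArtMethod code pointer location are
// supported so far; other kinds abort via TODO_VIXL32(FATAL).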
4855void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
4856 HInvokeStaticOrDirect* invoke, Location temp) {
4857 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4858 vixl32::Register temp_reg = RegisterFrom(temp);
4859
4860 switch (invoke->GetMethodLoadKind()) {
4861 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4862 uint32_t offset =
4863 GetThreadOffset<kArmPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
4864 // temp = thread->string_init_entrypoint
4865 GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, tr, offset);
4866 break;
4867 }
4868 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
4869 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
4870 vixl32::Register method_reg;
4871 if (current_method.IsRegister()) {
4872 method_reg = RegisterFrom(current_method);
4873 } else {
4874 TODO_VIXL32(FATAL);
4875 }
4876 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
4877 GetAssembler()->LoadFromOffset(
4878 kLoadWord,
4879 temp_reg,
4880 method_reg,
4881 ArtMethod::DexCacheResolvedMethodsOffset(kArmPointerSize).Int32Value());
4882 // temp = temp[index_in_cache];
4883 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
4884 uint32_t index_in_cache = invoke->GetDexMethodIndex();
4885 GetAssembler()->LoadFromOffset(
4886 kLoadWord, temp_reg, temp_reg, CodeGenerator::GetCachePointerOffset(index_in_cache));
4887 break;
4888 }
4889 default:
4890 TODO_VIXL32(FATAL);
4891 }
4892
4893 // TODO(VIXL): Support `CodePtrLocation` values other than `kCallArtMethod`.
4894 if (invoke->GetCodePtrLocation() != HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod) {
4895 TODO_VIXL32(FATAL);
4896 }
4897
4898 // LR = callee_method->entry_point_from_quick_compiled_code_
4899 GetAssembler()->LoadFromOffset(
4900 kLoadWord,
4901 lr,
4902 RegisterFrom(callee_method),
4903 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value());
4904 // LR()
4905 __ Blx(lr);
4906
4907 DCHECK(!IsLeafMethod());
4908}
4909
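// Emits a virtual call: loads the receiver's class, fetches the target ArtMethod* from the
// embedded vtable and calls its quick code entry point.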
4910void CodeGeneratorARMVIXL::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
4911 vixl32::Register temp = RegisterFrom(temp_location);
4912 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4913 invoke->GetVTableIndex(), kArmPointerSize).Uint32Value();
4914
4915 // Use the calling convention instead of the location of the receiver, as
4916 // intrinsics may have put the receiver in a different register. In the intrinsics
4917 // slow path, the arguments have been moved to the right place, so here we are
4918 // guaranteed that the receiver is the first register of the calling convention.
4919 InvokeDexCallingConventionARMVIXL calling_convention;
4920 vixl32::Register receiver = calling_convention.GetRegisterAt(0);
4921 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4922 // /* HeapReference<Class> */ temp = receiver->klass_
4923 GetAssembler()->LoadFromOffset(kLoadWord, temp, receiver, class_offset);
4924 MaybeRecordImplicitNullCheck(invoke);
4925 // Instead of simply (possibly) unpoisoning `temp` here, we should
4926 // emit a read barrier for the previous class reference load.
4927 // However, this is not required in practice, as this is an
4928 // intermediate/temporary reference and because the current
4929 // concurrent copying collector keeps the from-space memory
4930 // intact/accessible until the end of the marking phase (the
4931 // concurrent copying collector may not do so in the future).
4932 GetAssembler()->MaybeUnpoisonHeapReference(temp);
4933
4934 // temp = temp->GetMethodAt(method_offset);
4935 uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
4936 kArmPointerSize).Int32Value();
4937 GetAssembler()->LoadFromOffset(kLoadWord, temp, temp, method_offset);
4938 // LR = temp->GetEntryPoint();
4939 GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, entry_point);
4940 // LR();
4941 __ Blx(lr);
4942}
4943
Artem Serov02d37832016-10-25 15:25:33 +01004944// Copy the result of a call into the given target.
4945void CodeGeneratorARMVIXL::MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
4946 Primitive::Type type ATTRIBUTE_UNUSED) {
4947 TODO_VIXL32(FATAL);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004948}
Scott Wakelingfe885462016-09-22 10:24:38 +01004949
4950#undef __
4951#undef QUICK_ENTRY_POINT
4952#undef TODO_VIXL32
4953
4954} // namespace arm
4955} // namespace art