blob: ac0f37b7178ba80f75a3693a9dbb7bd5a384c873 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
// Offset of the ArtMethod* within the current stack frame: it is spilled at the
// bottom of the frame (offset 0).
static constexpr int kCurrentMethodStackOffset = 0;
// Register holding the ArtMethod* on entry, per the managed calling convention.
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. A compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Core and floating-point registers that the callee must preserve on x86-64.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Bit mask of the C2 condition flag in the x87 FPU status word (bit 10).
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
Andreas Gampe85b62f22015-09-09 13:15:38 -070058class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010059 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000060 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Alexandre Rames2ed20af2015-03-06 13:55:35 +000062 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000063 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010064 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000065 if (instruction_->CanThrowIntoCatchBlock()) {
66 // Live registers will be restored in the catch block if caught.
67 SaveLiveRegisters(codegen, instruction_->GetLocations());
68 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010069 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000070 instruction_,
71 instruction_->GetDexPc(),
72 this);
Roland Levillain888d0672015-11-23 18:53:50 +000073 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010074 }
75
Alexandre Rames8158f282015-08-07 10:26:17 +010076 bool IsFatal() const OVERRIDE { return true; }
77
Alexandre Rames9931f312015-06-19 14:47:01 +010078 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
79
Nicolas Geoffraye5038322014-07-04 09:41:32 +010080 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
82};
83
Andreas Gampe85b62f22015-09-09 13:15:38 -070084class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000085 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000086 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000087
Alexandre Rames2ed20af2015-03-06 13:55:35 +000088 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000089 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000090 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010091 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000092 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000093 }
94
Alexandre Rames8158f282015-08-07 10:26:17 +010095 bool IsFatal() const OVERRIDE { return true; }
96
Alexandre Rames9931f312015-06-19 14:47:01 +010097 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
98
Calin Juravled0d48522014-11-04 16:40:20 +000099 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000100 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
101};
102
// Slow path for integer div/rem when the divisor is -1, presumably to sidestep
// the x86 `idiv` overflow case (MIN_VALUE / -1) — TODO confirm against the
// div/rem visitor that installs this path. For division it emits a negation of
// the dividend register (the mathematically correct quotient); for remainder it
// zeroes the register (x rem -1 == 0).
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  // `reg` holds the dividend on entry and receives the result.
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // 32-bit xor suffices for the 64-bit case: on x86-64, writes to a
        // 32-bit register zero-extend into the full 64-bit register, and the
        // encoding is shorter than xorq.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  // Register holding the dividend on entry and the div/rem result on exit.
  const CpuRegister cpu_reg_;
  // Operand type: kPrimInt or kPrimLong.
  const Primitive::Type type_;
  // True for division (negate), false for remainder (zero).
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
136
// Slow path for a suspend check: calls the runtime's TestSuspend entrypoint so
// the thread can honor a pending suspension request, then resumes either at the
// return label (when `successor` is null, i.e. back-edge style checks) or by
// jumping to the explicit successor block.
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves full width XMM for SIMD.
    x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores full width XMM for SIMD.
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  // Label the fast path jumps back to; only meaningful when there is no
  // explicit successor block.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  // Block to resume at after the runtime call, or null to fall back to
  // `return_label_`.
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};
174
// Slow path entered on a failed array/string bounds check; marshals the index
// and length into the runtime calling convention and throws the appropriate
// out-of-bounds exception. Never returns to the fast path.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory? (The ArrayLength was folded
    // into the bounds check and never materialized in a register.)
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression) {
        // Compressed strings store the char count shifted left by one with a
        // compression flag bit; shift right to recover the length.
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    // String.charAt bounds failures throw StringIndexOutOfBoundsException
    // rather than ArrayIndexOutOfBoundsException.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
233
// Slow path that resolves (and optionally initializes) a class through the
// runtime. Used by HLoadClass and HClinitCheck; for the kBssEntry load kind it
// also stores the resolved class into the .bss slot so later loads hit the
// fast path.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  // `at` is the instruction this slow path is attached to: the HLoadClass
  // itself, or an HClinitCheck requesting initialization of `cls`.
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(cls_->GetTypeIndex().index_));
    x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
                                  instruction_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
      DCHECK(out.IsValid());
      // The address is a placeholder; the linker patches it via fixup_label.
      __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
              locations->Out().AsRegister<CpuRegister>());
      Label* fixup_label = x86_64_codegen->NewTypeBssEntryPatch(cls_);
      __ Bind(fixup_label);
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of the instruction this slow path is attached to.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
297
// Slow path that resolves a String through the runtime and stores the result
// into the string's .bss entry so subsequent loads take the fast path.
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(string_index.index_));
    x86_64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    // The address is a placeholder; the linker patches it via fixup_label.
    __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
            locations->Out().AsRegister<CpuRegister>());
    Label* fixup_label = x86_64_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
    __ Bind(fixup_label);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};
335
// Slow path for HInstanceOf and HCheckCast. For instanceof it calls the
// non-trivial runtime test and moves the result into the output register; for
// checkcast it calls the runtime check, which throws on failure. When
// `is_fatal` (checkcast known to fail or to throw), registers are not
// saved/restored and the path does not return.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // The runtime returns the instanceof result in RAX.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // True when the runtime call must throw (no return to the fast path).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
391
Andreas Gampe85b62f22015-09-09 13:15:38 -0700392class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700393 public:
Aart Bik42249c32016-01-07 15:33:50 -0800394 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000395 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700396
397 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000398 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700399 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100400 LocationSummary* locations = instruction_->GetLocations();
401 SaveLiveRegisters(codegen, locations);
402 InvokeRuntimeCallingConvention calling_convention;
403 x86_64_codegen->Load32BitValue(
404 CpuRegister(calling_convention.GetRegisterAt(0)),
405 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100406 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100407 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700408 }
409
Alexandre Rames9931f312015-06-19 14:47:01 +0100410 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
411
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700412 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700413 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
414};
415
// Slow path for HArraySet of an object reference: routes the store through the
// runtime's aput-object entrypoint, which performs the type check and the
// store. Array, index and value are moved to the runtime calling convention
// with a parallel move (the three locations may overlap).
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    // Argument 0: the array.
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    // Argument 1: the index.
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    // Argument 2: the value to store.
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};
456
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only the instructions below are expected to emit a marking slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI, output in RAX):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
540
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100541// Slow path marking an object reference `ref` during a read barrier,
542// and if needed, atomically updating the field `obj.field` in the
543// object `obj` holding this reference after marking (contrary to
544// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
545// `obj.field`).
546//
547// This means that after the execution of this slow path, both `ref`
548// and `obj.field` will be up-to-date; i.e., after the flip, both will
549// hold the same to-space reference (unless another thread installed
550// another object reference (different from `ref`) in `obj.field`).
551class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
552 public:
553 ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
554 Location ref,
555 CpuRegister obj,
556 const Address& field_addr,
557 bool unpoison_ref_before_marking,
558 CpuRegister temp1,
559 CpuRegister temp2)
560 : SlowPathCode(instruction),
561 ref_(ref),
562 obj_(obj),
563 field_addr_(field_addr),
564 unpoison_ref_before_marking_(unpoison_ref_before_marking),
565 temp1_(temp1),
566 temp2_(temp2) {
567 DCHECK(kEmitCompilerReadBarrier);
568 }
569
570 const char* GetDescription() const OVERRIDE {
571 return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
572 }
573
574 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
575 LocationSummary* locations = instruction_->GetLocations();
576 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
577 Register ref_reg = ref_cpu_reg.AsRegister();
578 DCHECK(locations->CanCall());
579 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
580 // This slow path is only used by the UnsafeCASObject intrinsic.
581 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
582 << "Unexpected instruction in read barrier marking and field updating slow path: "
583 << instruction_->DebugName();
584 DCHECK(instruction_->GetLocations()->Intrinsified());
585 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
586
587 __ Bind(GetEntryLabel());
588 if (unpoison_ref_before_marking_) {
589 // Object* ref = ref_addr->AsMirrorPtr()
590 __ MaybeUnpoisonHeapReference(ref_cpu_reg);
591 }
592
593 // Save the old (unpoisoned) reference.
594 __ movl(temp1_, ref_cpu_reg);
595
596 // No need to save live registers; it's taken care of by the
597 // entrypoint. Also, there is no need to update the stack mask,
598 // as this runtime call will not trigger a garbage collection.
599 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
600 DCHECK_NE(ref_reg, RSP);
601 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
602 // "Compact" slow path, saving two moves.
603 //
604 // Instead of using the standard runtime calling convention (input
605 // and output in R0):
606 //
607 // RDI <- ref
608 // RAX <- ReadBarrierMark(RDI)
609 // ref <- RAX
610 //
611 // we just use rX (the register containing `ref`) as input and output
612 // of a dedicated entrypoint:
613 //
614 // rX <- ReadBarrierMarkRegX(rX)
615 //
616 int32_t entry_point_offset =
617 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
618 // This runtime call does not require a stack map.
619 x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
620
621 // If the new reference is different from the old reference,
622 // update the field in the holder (`*field_addr`).
623 //
624 // Note that this field could also hold a different object, if
625 // another thread had concurrently changed it. In that case, the
626 // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
627 // operation below would abort the CAS, leaving the field as-is.
628 NearLabel done;
629 __ cmpl(temp1_, ref_cpu_reg);
630 __ j(kEqual, &done);
631
632 // Update the the holder's field atomically. This may fail if
633 // mutator updates before us, but it's OK. This is achived
634 // using a strong compare-and-set (CAS) operation with relaxed
635 // memory synchronization ordering, where the expected value is
636 // the old reference and the desired value is the new reference.
637 // This operation is implemented with a 32-bit LOCK CMPXLCHG
638 // instruction, which requires the expected value (the old
639 // reference) to be in EAX. Save RAX beforehand, and move the
640 // expected value (stored in `temp1_`) into EAX.
641 __ movq(temp2_, CpuRegister(RAX));
642 __ movl(CpuRegister(RAX), temp1_);
643
644 // Convenience aliases.
645 CpuRegister base = obj_;
646 CpuRegister expected = CpuRegister(RAX);
647 CpuRegister value = ref_cpu_reg;
648
649 bool base_equals_value = (base.AsRegister() == value.AsRegister());
650 Register value_reg = ref_reg;
651 if (kPoisonHeapReferences) {
652 if (base_equals_value) {
653 // If `base` and `value` are the same register location, move
654 // `value_reg` to a temporary register. This way, poisoning
655 // `value_reg` won't invalidate `base`.
656 value_reg = temp1_.AsRegister();
657 __ movl(CpuRegister(value_reg), base);
658 }
659
660 // Check that the register allocator did not assign the location
661 // of `expected` (RAX) to `value` nor to `base`, so that heap
662 // poisoning (when enabled) works as intended below.
663 // - If `value` were equal to `expected`, both references would
664 // be poisoned twice, meaning they would not be poisoned at
665 // all, as heap poisoning uses address negation.
666 // - If `base` were equal to `expected`, poisoning `expected`
667 // would invalidate `base`.
668 DCHECK_NE(value_reg, expected.AsRegister());
669 DCHECK_NE(base.AsRegister(), expected.AsRegister());
670
671 __ PoisonHeapReference(expected);
672 __ PoisonHeapReference(CpuRegister(value_reg));
673 }
674
675 __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));
676
677 // If heap poisoning is enabled, we need to unpoison the values
678 // that were poisoned earlier.
679 if (kPoisonHeapReferences) {
680 if (base_equals_value) {
681 // `value_reg` has been moved to a temporary register, no need
682 // to unpoison it.
683 } else {
684 __ UnpoisonHeapReference(CpuRegister(value_reg));
685 }
686 // No need to unpoison `expected` (RAX), as it is be overwritten below.
687 }
688
689 // Restore RAX.
690 __ movq(CpuRegister(RAX), temp2_);
691
692 __ Bind(&done);
693 __ jmp(GetExitLabel());
694 }
695
696 private:
697 // The location (register) of the marked object reference.
698 const Location ref_;
699 // The register containing the object holding the marked object reference field.
700 const CpuRegister obj_;
701 // The address of the marked reference field. The base of this address must be `obj_`.
702 const Address field_addr_;
703
704 // Should the reference in `ref_` be unpoisoned prior to marking it?
705 const bool unpoison_ref_before_marking_;
706
707 const CpuRegister temp1_;
708 const CpuRegister temp2_;
709
710 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
711};
712
Roland Levillain0d5a2812015-11-13 10:07:31 +0000713// Slow path generating a read barrier for a heap reference.
714class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
715 public:
716 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
717 Location out,
718 Location ref,
719 Location obj,
720 uint32_t offset,
721 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000722 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000723 out_(out),
724 ref_(ref),
725 obj_(obj),
726 offset_(offset),
727 index_(index) {
728 DCHECK(kEmitCompilerReadBarrier);
729 // If `obj` is equal to `out` or `ref`, it means the initial
730 // object has been overwritten by (or after) the heap object
731 // reference load to be instrumented, e.g.:
732 //
733 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000734 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000735 //
736 // In that case, we have lost the information about the original
737 // object, and the emitted read barrier cannot work properly.
738 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
739 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
740}
741
742 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
743 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
744 LocationSummary* locations = instruction_->GetLocations();
745 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
746 DCHECK(locations->CanCall());
747 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100748 DCHECK(instruction_->IsInstanceFieldGet() ||
749 instruction_->IsStaticFieldGet() ||
750 instruction_->IsArrayGet() ||
751 instruction_->IsInstanceOf() ||
752 instruction_->IsCheckCast() ||
Andreas Gamped9911ee2017-03-27 13:27:24 -0700753 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000754 << "Unexpected instruction in read barrier for heap reference slow path: "
755 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000756
757 __ Bind(GetEntryLabel());
758 SaveLiveRegisters(codegen, locations);
759
760 // We may have to change the index's value, but as `index_` is a
761 // constant member (like other "inputs" of this slow path),
762 // introduce a copy of it, `index`.
763 Location index = index_;
764 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100765 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000766 if (instruction_->IsArrayGet()) {
767 // Compute real offset and store it in index_.
768 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
769 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
770 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
771 // We are about to change the value of `index_reg` (see the
772 // calls to art::x86_64::X86_64Assembler::shll and
773 // art::x86_64::X86_64Assembler::AddImmediate below), but it
774 // has not been saved by the previous call to
775 // art::SlowPathCode::SaveLiveRegisters, as it is a
776 // callee-save register --
777 // art::SlowPathCode::SaveLiveRegisters does not consider
778 // callee-save registers, as it has been designed with the
779 // assumption that callee-save registers are supposed to be
780 // handled by the called function. So, as a callee-save
781 // register, `index_reg` _would_ eventually be saved onto
782 // the stack, but it would be too late: we would have
783 // changed its value earlier. Therefore, we manually save
784 // it here into another freely available register,
785 // `free_reg`, chosen of course among the caller-save
786 // registers (as a callee-save `free_reg` register would
787 // exhibit the same problem).
788 //
789 // Note we could have requested a temporary register from
790 // the register allocator instead; but we prefer not to, as
791 // this is a slow path, and we know we can find a
792 // caller-save register that is available.
793 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
794 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
795 index_reg = free_reg;
796 index = Location::RegisterLocation(index_reg);
797 } else {
798 // The initial register stored in `index_` has already been
799 // saved in the call to art::SlowPathCode::SaveLiveRegisters
800 // (as it is not a callee-save register), so we can freely
801 // use it.
802 }
803 // Shifting the index value contained in `index_reg` by the
804 // scale factor (2) cannot overflow in practice, as the
805 // runtime is unable to allocate object arrays with a size
806 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
807 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
808 static_assert(
809 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
810 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
811 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
812 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100813 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
814 // intrinsics, `index_` is not shifted by a scale factor of 2
815 // (as in the case of ArrayGet), as it is actually an offset
816 // to an object field within an object.
817 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000818 DCHECK(instruction_->GetLocations()->Intrinsified());
819 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
820 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
821 << instruction_->AsInvoke()->GetIntrinsic();
822 DCHECK_EQ(offset_, 0U);
823 DCHECK(index_.IsRegister());
824 }
825 }
826
827 // We're moving two or three locations to locations that could
828 // overlap, so we need a parallel move resolver.
829 InvokeRuntimeCallingConvention calling_convention;
830 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
831 parallel_move.AddMove(ref_,
832 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
833 Primitive::kPrimNot,
834 nullptr);
835 parallel_move.AddMove(obj_,
836 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
837 Primitive::kPrimNot,
838 nullptr);
839 if (index.IsValid()) {
840 parallel_move.AddMove(index,
841 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
842 Primitive::kPrimInt,
843 nullptr);
844 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
845 } else {
846 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
847 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
848 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100849 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000850 instruction_,
851 instruction_->GetDexPc(),
852 this);
853 CheckEntrypointTypes<
854 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
855 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
856
857 RestoreLiveRegisters(codegen, locations);
858 __ jmp(GetExitLabel());
859 }
860
861 const char* GetDescription() const OVERRIDE {
862 return "ReadBarrierForHeapReferenceSlowPathX86_64";
863 }
864
865 private:
866 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
867 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
868 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
869 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
870 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
871 return static_cast<CpuRegister>(i);
872 }
873 }
874 // We shall never fail to find a free caller-save register, as
875 // there are more than two core caller-save registers on x86-64
876 // (meaning it is possible to find one which is different from
877 // `ref` and `obj`).
878 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
879 LOG(FATAL) << "Could not find a free caller-save register";
880 UNREACHABLE();
881 }
882
Roland Levillain0d5a2812015-11-13 10:07:31 +0000883 const Location out_;
884 const Location ref_;
885 const Location obj_;
886 const uint32_t offset_;
887 // An additional location containing an index to an array.
888 // Only used for HArrayGet and the UnsafeGetObject &
889 // UnsafeGetObjectVolatile intrinsics.
890 const Location index_;
891
892 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
893};
894
// Slow path generating a read barrier for a GC root.
// Calls the kQuickReadBarrierForRootSlow entrypoint with the root in the
// first argument register and moves the result (RAX) into `out`.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // GC roots are only loaded by HLoadClass and HLoadString here.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // The location where the read barrier result is stored.
  const Location out_;
  // The location of the GC root to pass to the runtime.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
936
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100937#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100938// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
939#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100940
Roland Levillain4fa13f62015-07-06 18:11:54 +0100941inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700942 switch (cond) {
943 case kCondEQ: return kEqual;
944 case kCondNE: return kNotEqual;
945 case kCondLT: return kLess;
946 case kCondLE: return kLessEqual;
947 case kCondGT: return kGreater;
948 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700949 case kCondB: return kBelow;
950 case kCondBE: return kBelowEqual;
951 case kCondA: return kAbove;
952 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700953 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100954 LOG(FATAL) << "Unreachable";
955 UNREACHABLE();
956}
957
Aart Bike9f37602015-10-09 11:15:55 -0700958// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100959inline Condition X86_64FPCondition(IfCondition cond) {
960 switch (cond) {
961 case kCondEQ: return kEqual;
962 case kCondNE: return kNotEqual;
963 case kCondLT: return kBelow;
964 case kCondLE: return kBelowEqual;
965 case kCondGT: return kAbove;
966 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700967 default: break; // should not happen
Roland Levillain4fa13f62015-07-06 18:11:54 +0100968 };
969 LOG(FATAL) << "Unreachable";
970 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700971}
972
Vladimir Markodc151b22015-10-15 18:02:30 +0100973HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
974 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +0100975 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Nicolas Geoffrayc1a42cf2016-12-18 15:52:36 +0000976 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +0100977}
978
Serguei Katkov288c7a82016-05-16 11:53:15 +0600979Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
980 Location temp) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -0800981 // All registers are assumed to be correctly set up.
Vladimir Marko58155012015-08-19 12:49:41 +0000982 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
983 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +0100984 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +0000985 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +0100986 uint32_t offset =
987 GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
988 __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
Vladimir Marko58155012015-08-19 12:49:41 +0000989 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +0100990 }
Vladimir Marko58155012015-08-19 12:49:41 +0000991 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +0000992 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +0000993 break;
994 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
Vladimir Marko2d73f332017-03-16 15:55:49 +0000995 Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
Vladimir Marko58155012015-08-19 12:49:41 +0000996 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000997 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
Vladimir Marko58155012015-08-19 12:49:41 +0000998 __ movq(temp.AsRegister<CpuRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000999 Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001000 // Bind a new fixup label at the end of the "movl" insn.
1001 uint32_t offset = invoke->GetDexCacheArrayOffset();
Nicolas Geoffray5d37c152017-01-12 13:25:19 +00001002 __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFileForPcRelativeDexCache(), offset));
Vladimir Marko58155012015-08-19 12:49:41 +00001003 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001004 }
Vladimir Marko58155012015-08-19 12:49:41 +00001005 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00001006 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00001007 Register method_reg;
1008 CpuRegister reg = temp.AsRegister<CpuRegister>();
1009 if (current_method.IsRegister()) {
1010 method_reg = current_method.AsRegister<Register>();
1011 } else {
1012 DCHECK(invoke->GetLocations()->Intrinsified());
1013 DCHECK(!current_method.IsValid());
1014 method_reg = reg.AsRegister();
1015 __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
1016 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00001017 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01001018 __ movq(reg,
1019 Address(CpuRegister(method_reg),
1020 ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
Vladimir Marko40ecb122016-04-06 17:33:41 +01001021 // temp = temp[index_in_cache];
1022 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
1023 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00001024 __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
1025 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01001026 }
Vladimir Marko58155012015-08-19 12:49:41 +00001027 }
Serguei Katkov288c7a82016-05-16 11:53:15 +06001028 return callee_method;
1029}
1030
1031void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
1032 Location temp) {
1033 // All registers are assumed to be correctly set up.
1034 Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
Vladimir Marko58155012015-08-19 12:49:41 +00001035
1036 switch (invoke->GetCodePtrLocation()) {
1037 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
1038 __ call(&frame_entry_label_);
1039 break;
Vladimir Marko58155012015-08-19 12:49:41 +00001040 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
1041 // (callee_method + offset_of_quick_compiled_code)()
1042 __ call(Address(callee_method.AsRegister<CpuRegister>(),
1043 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07001044 kX86_64PointerSize).SizeValue()));
Vladimir Marko58155012015-08-19 12:49:41 +00001045 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001046 }
Andreas Gampe71fb52f2014-12-29 17:43:08 -08001047
1048 DCHECK(!IsLeafMethod());
Andreas Gampe71fb52f2014-12-29 17:43:08 -08001049}
1050
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001051void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
1052 CpuRegister temp = temp_in.AsRegister<CpuRegister>();
1053 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
1054 invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001055
1056 // Use the calling convention instead of the location of the receiver, as
1057 // intrinsics may have put the receiver in a different register. In the intrinsics
1058 // slow path, the arguments have been moved to the right place, so here we are
1059 // guaranteed that the receiver is the first register of the calling convention.
1060 InvokeDexCallingConvention calling_convention;
1061 Register receiver = calling_convention.GetRegisterAt(0);
1062
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001063 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
Roland Levillain0d5a2812015-11-13 10:07:31 +00001064 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001065 __ movl(temp, Address(CpuRegister(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001066 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00001067 // Instead of simply (possibly) unpoisoning `temp` here, we should
1068 // emit a read barrier for the previous class reference load.
1069 // However this is not required in practice, as this is an
1070 // intermediate/temporary reference and because the current
1071 // concurrent copying collector keeps the from-space memory
1072 // intact/accessible until the end of the marking phase (the
1073 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001074 __ MaybeUnpoisonHeapReference(temp);
1075 // temp = temp->GetMethodAt(method_offset);
1076 __ movq(temp, Address(temp, method_offset));
1077 // call temp->GetEntryPoint();
1078 __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07001079 kX86_64PointerSize).SizeValue()));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001080}
1081
Vladimir Markoaad75c62016-10-03 08:46:48 +00001082void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
1083 DCHECK(GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001084 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001085 __ Bind(&string_patches_.back().label);
1086}
1087
Vladimir Marko1998cd02017-01-13 13:02:58 +00001088void CodeGeneratorX86_64::RecordBootTypePatch(HLoadClass* load_class) {
1089 boot_image_type_patches_.emplace_back(load_class->GetDexFile(),
1090 load_class->GetTypeIndex().index_);
1091 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001092}
1093
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001094Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00001095 type_bss_entry_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
1096 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001097}
1098
Vladimir Markoaad75c62016-10-03 08:46:48 +00001099Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
1100 DCHECK(!GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001101 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001102 return &string_patches_.back().label;
1103}
1104
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001105Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
1106 uint32_t element_offset) {
1107 // Add a patch entry and return the label.
1108 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
1109 return &pc_relative_dex_cache_patches_.back().label;
1110}
1111
Vladimir Markoaad75c62016-10-03 08:46:48 +00001112// The label points to the end of the "movl" or another instruction but the literal offset
1113// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
1114constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1115
1116template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
1117inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1118 const ArenaDeque<PatchInfo<Label>>& infos,
1119 ArenaVector<LinkerPatch>* linker_patches) {
1120 for (const PatchInfo<Label>& info : infos) {
1121 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1122 linker_patches->push_back(
1123 Factory(literal_offset, &info.dex_file, info.label.Position(), info.index));
1124 }
1125}
1126
// Flushes all patches recorded during code generation into `linker_patches`.
// String patches become RelativeStringPatch for boot-image compiles and
// StringBssEntryPatch otherwise; boot-image type patches only exist for
// boot-image compiles.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_dex_cache_patches_.size() +
      string_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size();
  // Reserve up front; the final DCHECK verifies the accounting stays exact.
  linker_patches->reserve(size);
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(boot_image_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
  } else {
    // Non-boot-image compiles never record boot-image type patches.
    DCHECK(boot_image_type_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1149
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001150void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001151 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001152}
1153
1154void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001155 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001156}
1157
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001158size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1159 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1160 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001161}
1162
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001163size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1164 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1165 return kX86_64WordSize;
1166}
1167
1168size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001169 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001170 __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -07001171 } else {
1172 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1173 }
1174 return GetFloatingPointSpillSlotSize();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001175}
1176
1177size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001178 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001179 __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -07001180 } else {
1181 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1182 }
1183 return GetFloatingPointSpillSlotSize();
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001184}
1185
// Emits a call to the given quick runtime entrypoint and, when the entrypoint
// requires one, records a stack map at `dex_pc` for the calling instruction.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1196
// Like InvokeRuntime() but takes a raw entry point offset and never records a
// stack map; the validation helper checks this is a legal use.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1203
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  // Runtime entry points are reached at a fixed offset from the Thread
  // object, which is addressed through the GS segment on x86-64.
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
}
1207
// x86-64 holds 64-bit values in single registers, so no register pairs are needed.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    // Core callee-saves, plus the fake return address register
                    // (see kFakeReturnRegister above) to mimic Quick frames.
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      constant_area_start_(0),
      // All patch containers are arena-allocated alongside the graph.
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Mark the fake return address register as allocated so frame bookkeeping
  // accounts for the pushed return address.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001242
// The instruction visitor shares the assembler owned by the codegen so all
// visitors emit into the same instruction stream.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1248
// Marks registers that the register allocator must never hand out.
void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP (scratch for memory-to-memory moves).
  blocked_core_registers_[TMP] = true;
}
1256
// Maps an x86-64 core register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
David Srbecky9d8606d2015-04-12 09:35:32 +01001260
// Maps an x86-64 floating-point register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1264
// Emits the method prologue: stack overflow probe, callee-save spills, frame
// allocation, optional should-deoptimize flag, and the ArtMethod* store.
// Every stack adjustment is mirrored in the CFI so unwinding stays correct.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Implicit stack overflow check: touch the page below the reserved bytes;
    // a fault here is translated by the runtime into a StackOverflowError.
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push core callee-saves in reverse order so that pops in GenerateFrameExit
  // restore them in declaration order.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the remainder of the frame in one adjustment.
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill FP callee-saves into their dedicated slots above the spill start.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ movl(Address(CpuRegister(RSP), xmm_spill_location - kShouldDeoptimizeFlagSize),
            Immediate(0));
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }
}
1319
// Emits the method epilogue, undoing GenerateFrameEntry in reverse order:
// restore FP callee-saves, release the frame, pop core callee-saves, return.
// CFI state is remembered/restored so code after the return (e.g. slow paths)
// unwinds with the full-frame description.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Release the portion of the frame allocated with subq in the prologue.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop core callee-saves in the opposite order of the prologue pushes.
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  // Code following this point may belong to slow paths within the frame.
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1350
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001351void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1352 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001353}
1354
// Emits code moving a value between two arbitrary locations: CPU register,
// XMM register, 32-bit stack slot, 64-bit stack slot, or a constant source.
// Memory-to-memory moves are routed through the reserved TMP register.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;  // Nothing to do.
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // Raw bit transfer from XMM to GP register.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      // Raw bit transfer from GP to XMM register.
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Load the constant's bit pattern; 32 bits for float, 64 for double/long.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // 32-bit constants can be stored as immediates.
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack move via TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      // x86-64 has no 64-bit store-immediate; delegate to the helper.
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // 64-bit stack-to-stack move via TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1434
Calin Juravle175dc732015-08-25 15:42:32 +01001435void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1436 DCHECK(location.IsRegister());
1437 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1438}
1439
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
  // Move() dispatches purely on the location kinds, so the type is unused here.
  Move(dst, src);
}
1444
1445void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1446 if (location.IsRegister()) {
1447 locations->AddTemp(location);
1448 } else {
1449 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1450 }
1451}
1452
// Shared implementation for HGoto and HTryBoundary: emit any pending suspend
// check and jump to `successor` unless it is the fallthrough block.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: emit it here and branch to the header.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // A suspend check right before the goto in the entry block is emitted here.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Skip the jump when the successor is laid out immediately after this block.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1472
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  // An unconditional jump needs no operands.
  got->SetLocations(nullptr);
}
1476
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  // Delegate to the shared goto/try-boundary handler.
  HandleGoto(got, got->GetSuccessor());
}
1480
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // A try boundary needs no operands.
  try_boundary->SetLocations(nullptr);
}
1484
1485void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1486 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1487 if (!successor->IsExitBlock()) {
1488 HandleGoto(try_boundary, successor);
1489 }
1490}
1491
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  // The exit block needs no operands.
  exit->SetLocations(nullptr);
}
1495
// No code is generated for the exit block itself.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1498
// Emits the conditional jumps following a ucomiss/ucomisd comparison (see
// GenerateCompareTest). Those instructions report NaN operands as "unordered",
// so the NaN outcome is dispatched explicitly before the ordered condition.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1510
// Emits the comparison that sets the condition codes for `condition`, without
// emitting any jump. Integral types use cmp/test; float/double use
// ucomiss/ucomisd against a register, a constant-area literal, or a stack slot.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant materialized in the constant area.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant materialized in the constant area.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1563
1564template<class LabelType>
1565void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1566 LabelType* true_target_in,
1567 LabelType* false_target_in) {
1568 // Generated branching requires both targets to be explicit. If either of the
1569 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1570 LabelType fallthrough_target;
1571 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1572 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1573
1574 // Generate the comparison to set the CC.
1575 GenerateCompareTest(condition);
1576
1577 // Now generate the correct jump(s).
1578 Primitive::Type type = condition->InputAt(0)->GetType();
1579 switch (type) {
1580 case Primitive::kPrimLong: {
1581 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1582 break;
1583 }
1584 case Primitive::kPrimFloat: {
1585 GenerateFPJumps(condition, true_target, false_target);
1586 break;
1587 }
1588 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001589 GenerateFPJumps(condition, true_target, false_target);
1590 break;
1591 }
1592 default:
1593 LOG(FATAL) << "Unexpected condition type " << type;
1594 }
1595
David Brazdil0debae72015-11-12 18:37:00 +00001596 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001597 __ jmp(false_target);
1598 }
David Brazdil0debae72015-11-12 18:37:00 +00001599
1600 if (fallthrough_target.IsLinked()) {
1601 __ Bind(&fallthrough_target);
1602 }
Mark Mendellc4701932015-04-10 13:18:51 -04001603}
1604
David Brazdil0debae72015-11-12 18:37:00 +00001605static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1606 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1607 // are set only strictly before `branch`. We can't use the eflags on long
1608 // conditions if they are materialized due to the complex branching.
1609 return cond->IsCondition() &&
1610 cond->GetNext() == branch &&
1611 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1612}
1613
// Emits the branching code for `instruction` whose condition is input number
// `condition_input_index`. Either target may be nullptr to indicate
// fallthrough. Handles constant conditions, materialized booleans (compare
// against 0 or reuse eflags), and non-materialized comparisons.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The eflags from the condition are still live: branch on them directly.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1697
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    // A materialized condition may live in a register or on the stack.
    locations->SetInAt(0, Location::Any());
  }
}
1704
1705void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001706 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1707 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1708 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1709 nullptr : codegen_->GetLabelOf(true_successor);
1710 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1711 nullptr : codegen_->GetLabelOf(false_successor);
1712 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001713}
1714
// Builds locations for HDeoptimize, which bails out to the interpreter via a
// slow path (hence kCallOnSlowPath).
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  // Restrict the slow path's caller-saves to the first runtime-call argument
  // register. NOTE(review): presumably that register carries an argument to
  // the deoptimization entry point -- confirm against DeoptimizationSlowPathX86_64.
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  // As with HIf, only a materialized condition needs an input location.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1726
// Emits a test of the deopt condition that branches to the deoptimization
// slow path when it holds; a null false target makes the not-taken case fall
// through into the regular code.
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target */ nullptr);
}
1734
Mingyao Yang063fc772016-08-02 11:02:54 -07001735void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1736 LocationSummary* locations = new (GetGraph()->GetArena())
1737 LocationSummary(flag, LocationSummary::kNoCall);
1738 locations->SetOut(Location::RequiresRegister());
1739}
1740
// Loads the "should deoptimize" flag from its reserved slot in the current
// stack frame into the output register.
void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
          Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
1745
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001746static bool SelectCanUseCMOV(HSelect* select) {
1747 // There are no conditional move instructions for XMMs.
1748 if (Primitive::IsFloatingPointType(select->GetType())) {
1749 return false;
1750 }
1751
1752 // A FP condition doesn't generate the single CC that we need.
1753 HInstruction* condition = select->GetCondition();
1754 if (condition->IsCondition() &&
1755 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1756 return false;
1757 }
1758
1759 // We can generate a CMOV for this Select.
1760 return true;
1761}
1762
David Brazdil74eb1b22015-12-14 11:44:01 +00001763void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1764 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1765 if (Primitive::IsFloatingPointType(select->GetType())) {
1766 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001767 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001768 } else {
1769 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001770 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001771 if (select->InputAt(1)->IsConstant()) {
1772 locations->SetInAt(1, Location::RequiresRegister());
1773 } else {
1774 locations->SetInAt(1, Location::Any());
1775 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001776 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001777 locations->SetInAt(1, Location::Any());
1778 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001779 }
1780 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1781 locations->SetInAt(2, Location::RequiresRegister());
1782 }
1783 locations->SetOut(Location::SameAsFirstInput());
1784}
1785
// Emits an HSelect either as a CMOV over the "false" value already sitting in
// the output register, or as a test-and-branch around a move of the "true"
// value (FP types, FP conditions).
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    // Default: a materialized boolean is tested against zero, so "condition
    // true" is "not equal".
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition folded into this Select: emit the compare here.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      // "True" value lives on the stack: CMOV with a memory source.
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Branch lowering: skip the move of the "true" value when the condition
    // does not hold; the output already contains the "false" value.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1842
// HNativeDebugInfo marks a native PC for the debugger; it needs no operands.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

// Emits a single-byte nop (used e.g. to make debug PCs distinct).
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1854
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001855void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001856 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001857 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001858 // Handle the long/FP comparisons made in instruction simplification.
1859 switch (cond->InputAt(0)->GetType()) {
1860 case Primitive::kPrimLong:
1861 locations->SetInAt(0, Location::RequiresRegister());
1862 locations->SetInAt(1, Location::Any());
1863 break;
1864 case Primitive::kPrimFloat:
1865 case Primitive::kPrimDouble:
1866 locations->SetInAt(0, Location::RequiresFpuRegister());
1867 locations->SetInAt(1, Location::Any());
1868 break;
1869 default:
1870 locations->SetInAt(0, Location::RequiresRegister());
1871 locations->SetInAt(1, Location::Any());
1872 break;
1873 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001874 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001875 locations->SetOut(Location::RequiresRegister());
1876 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001877}
1878
// Materializes an HCondition into a 0/1 value in the output register.
// Integer/long conditions use a single setcc; FP conditions cannot be
// expressed as one condition code, so jumps are emitted and the result is
// materialized from labels below.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  // A condition emitted at its use site is generated by the consumer
  // (If/Select/Deoptimize); nothing to do here.
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimLong:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimFloat: {
      // ucomiss supports a literal-pool, stack-slot or register second operand.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1948
// Every concrete HCondition subclass -- signed (Equal..GreaterThanOrEqual)
// and unsigned (Below..AboveOrEqual) comparisons -- is lowered uniformly:
// HandleCondition inspects the instruction itself to pick the condition code.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2028
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002029void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002030 LocationSummary* locations =
2031 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00002032 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002033 case Primitive::kPrimBoolean:
2034 case Primitive::kPrimByte:
2035 case Primitive::kPrimShort:
2036 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002037 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00002038 case Primitive::kPrimLong: {
2039 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04002040 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00002041 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2042 break;
2043 }
2044 case Primitive::kPrimFloat:
2045 case Primitive::kPrimDouble: {
2046 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04002047 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00002048 locations->SetOut(Location::RequiresRegister());
2049 break;
2050 }
2051 default:
2052 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2053 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002054}
2055
// Emits HCompare: out = -1 if left < right, 0 if equal, 1 if greater. For FP
// inputs, an unordered result (NaN) is routed to "greater" or "less"
// according to the compare's gt/lt bias.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  // Condition that selects the "less" outcome after the compare below.
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN: PF set -> kUnordered; dispatch per bias before the ordered checks.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Materialize the three outcomes from the flags set above.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2124
// Constants are not materialized by their own visitor: the location summary
// records a constant location and the value is generated at each use site,
// so the codegen visitors below are intentionally empty.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2175
// A constructor fence needs no operands; it is lowered to a store-store
// barrier below.
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

// Generic memory barrier: the barrier kind comes from the instruction itself.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2192
// A void return has no operand; it simply tears down the frame.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2200
// Pins the returned value to the ABI return register so no move is needed at
// the return site: RAX for integral/reference types, XMM0 for FP types.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      // kPrimVoid is handled by HReturnVoid, so it is unexpected here.
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2224
// The register allocator already placed the value in the return register (see
// the locations builder above); in debug builds we re-verify that before
// emitting the frame exit.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2250
// Return-value location for the dex calling convention: RAX for
// integral/reference types, XMM0 for FP types, nothing for void. The switch
// is exhaustive over Primitive::Type, so falling out of it is unreachable.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(RAX);

    case Primitive::kPrimVoid:
      return Location::NoLocation();

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      return Location::FpuRegisterLocation(XMM0);
  }

  UNREACHABLE();
}

// The callee method (ArtMethod*) is always passed in the dedicated method
// register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2276
// Assigns the location of the next managed-call argument of the given type.
// Stateful: gp_index_ counts GP argument registers consumed, float_index_
// counts FPU argument registers, and stack_index_ counts 32-bit stack slots
// (advanced even for register arguments, since the stack offset of a spilled
// argument depends on all preceding arguments).
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Ran out of registers: one 32-bit slot, counted just above.
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;  // A long always occupies two dex/stack slots.
      if (index < calling_convention.GetNumberOfRegisters()) {
        // On x86-64 a long fits in a single GP register.
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;  // A double occupies two slots.
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2332
Calin Juravle175dc732015-08-25 15:42:32 +01002333void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2334 // The trampoline uses the same calling convention as dex calling conventions,
2335 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
2336 // the method_idx.
2337 HandleInvoke(invoke);
2338}
2339
2340void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2341 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2342}
2343
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002344void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002345 // Explicit clinit checks triggered by static invokes must have been pruned by
2346 // art::PrepareForRegisterAllocation.
2347 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002348
Mark Mendellfb8d2792015-03-31 22:16:59 -04002349 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002350 if (intrinsic.TryDispatch(invoke)) {
2351 return;
2352 }
2353
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002354 HandleInvoke(invoke);
2355}
2356
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002357static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2358 if (invoke->GetLocations()->Intrinsified()) {
2359 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2360 intrinsic.Dispatch(invoke);
2361 return true;
2362 }
2363 return false;
2364}
2365
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002366void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002367 // Explicit clinit checks triggered by static invokes must have been pruned by
2368 // art::PrepareForRegisterAllocation.
2369 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002370
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002371 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2372 return;
2373 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002374
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002375 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002376 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002377 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002378 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002379}
2380
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002381void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002382 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002383 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002384}
2385
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002386void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002387 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002388 if (intrinsic.TryDispatch(invoke)) {
2389 return;
2390 }
2391
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002392 HandleInvoke(invoke);
2393}
2394
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002395void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002396 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2397 return;
2398 }
2399
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002400 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002401 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002402 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002403}
2404
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002405void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2406 HandleInvoke(invoke);
2407 // Add the hidden argument.
2408 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2409}
2410
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // Emits an interface dispatch: load the receiver's class, index into its
  // IMT with this invoke's IMT slot, and call the resolved ArtMethod's
  // quick entry point.
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. It is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  // The class load above is the instruction that may fault on a null receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the byte offset of this invoke's slot within the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2456
Orion Hodsonac141392017-01-13 11:53:47 +00002457void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2458 HandleInvoke(invoke);
2459}
2460
2461void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2462 codegen_->GenerateInvokePolymorphicCall(invoke);
2463}
2464
Roland Levillain88cb1752014-10-20 16:36:47 +01002465void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2466 LocationSummary* locations =
2467 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2468 switch (neg->GetResultType()) {
2469 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002470 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002471 locations->SetInAt(0, Location::RequiresRegister());
2472 locations->SetOut(Location::SameAsFirstInput());
2473 break;
2474
Roland Levillain88cb1752014-10-20 16:36:47 +01002475 case Primitive::kPrimFloat:
2476 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002477 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002478 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002479 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002480 break;
2481
2482 default:
2483 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2484 }
2485}
2486
2487void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2488 LocationSummary* locations = neg->GetLocations();
2489 Location out = locations->Out();
2490 Location in = locations->InAt(0);
2491 switch (neg->GetResultType()) {
2492 case Primitive::kPrimInt:
2493 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002494 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002495 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002496 break;
2497
2498 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002499 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002500 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002501 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002502 break;
2503
Roland Levillain5368c212014-11-27 15:03:41 +00002504 case Primitive::kPrimFloat: {
2505 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002506 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002507 // Implement float negation with an exclusive or with value
2508 // 0x80000000 (mask for bit 31, representing the sign of a
2509 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002510 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002511 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002512 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002513 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002514
Roland Levillain5368c212014-11-27 15:03:41 +00002515 case Primitive::kPrimDouble: {
2516 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002517 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002518 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002519 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002520 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002521 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002522 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002523 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002524 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002525
2526 default:
2527 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2528 }
2529}
2530
Roland Levillaindff1f282014-11-05 14:15:05 +00002531void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2532 LocationSummary* locations =
2533 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2534 Primitive::Type result_type = conversion->GetResultType();
2535 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002536 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002537
David Brazdilb2bd1c52015-03-25 11:17:37 +00002538 // The Java language does not allow treating boolean as an integral type but
2539 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002540
Roland Levillaindff1f282014-11-05 14:15:05 +00002541 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002542 case Primitive::kPrimByte:
2543 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002544 case Primitive::kPrimLong:
2545 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002546 case Primitive::kPrimBoolean:
2547 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002548 case Primitive::kPrimShort:
2549 case Primitive::kPrimInt:
2550 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002551 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002552 locations->SetInAt(0, Location::Any());
2553 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2554 break;
2555
2556 default:
2557 LOG(FATAL) << "Unexpected type conversion from " << input_type
2558 << " to " << result_type;
2559 }
2560 break;
2561
Roland Levillain01a8d712014-11-14 16:27:39 +00002562 case Primitive::kPrimShort:
2563 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002564 case Primitive::kPrimLong:
2565 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002566 case Primitive::kPrimBoolean:
2567 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002568 case Primitive::kPrimByte:
2569 case Primitive::kPrimInt:
2570 case Primitive::kPrimChar:
2571 // Processing a Dex `int-to-short' instruction.
2572 locations->SetInAt(0, Location::Any());
2573 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2574 break;
2575
2576 default:
2577 LOG(FATAL) << "Unexpected type conversion from " << input_type
2578 << " to " << result_type;
2579 }
2580 break;
2581
Roland Levillain946e1432014-11-11 17:35:19 +00002582 case Primitive::kPrimInt:
2583 switch (input_type) {
2584 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002585 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002586 locations->SetInAt(0, Location::Any());
2587 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2588 break;
2589
2590 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002591 // Processing a Dex `float-to-int' instruction.
2592 locations->SetInAt(0, Location::RequiresFpuRegister());
2593 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002594 break;
2595
Roland Levillain946e1432014-11-11 17:35:19 +00002596 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002597 // Processing a Dex `double-to-int' instruction.
2598 locations->SetInAt(0, Location::RequiresFpuRegister());
2599 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002600 break;
2601
2602 default:
2603 LOG(FATAL) << "Unexpected type conversion from " << input_type
2604 << " to " << result_type;
2605 }
2606 break;
2607
Roland Levillaindff1f282014-11-05 14:15:05 +00002608 case Primitive::kPrimLong:
2609 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002610 case Primitive::kPrimBoolean:
2611 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002612 case Primitive::kPrimByte:
2613 case Primitive::kPrimShort:
2614 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002615 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002616 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002617 // TODO: We would benefit from a (to-be-implemented)
2618 // Location::RegisterOrStackSlot requirement for this input.
2619 locations->SetInAt(0, Location::RequiresRegister());
2620 locations->SetOut(Location::RequiresRegister());
2621 break;
2622
2623 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002624 // Processing a Dex `float-to-long' instruction.
2625 locations->SetInAt(0, Location::RequiresFpuRegister());
2626 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002627 break;
2628
Roland Levillaindff1f282014-11-05 14:15:05 +00002629 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002630 // Processing a Dex `double-to-long' instruction.
2631 locations->SetInAt(0, Location::RequiresFpuRegister());
2632 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002633 break;
2634
2635 default:
2636 LOG(FATAL) << "Unexpected type conversion from " << input_type
2637 << " to " << result_type;
2638 }
2639 break;
2640
Roland Levillain981e4542014-11-14 11:47:14 +00002641 case Primitive::kPrimChar:
2642 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002643 case Primitive::kPrimLong:
2644 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002645 case Primitive::kPrimBoolean:
2646 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002647 case Primitive::kPrimByte:
2648 case Primitive::kPrimShort:
2649 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002650 // Processing a Dex `int-to-char' instruction.
2651 locations->SetInAt(0, Location::Any());
2652 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2653 break;
2654
2655 default:
2656 LOG(FATAL) << "Unexpected type conversion from " << input_type
2657 << " to " << result_type;
2658 }
2659 break;
2660
Roland Levillaindff1f282014-11-05 14:15:05 +00002661 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002662 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002663 case Primitive::kPrimBoolean:
2664 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002665 case Primitive::kPrimByte:
2666 case Primitive::kPrimShort:
2667 case Primitive::kPrimInt:
2668 case Primitive::kPrimChar:
2669 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002670 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002671 locations->SetOut(Location::RequiresFpuRegister());
2672 break;
2673
2674 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002675 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002676 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002677 locations->SetOut(Location::RequiresFpuRegister());
2678 break;
2679
Roland Levillaincff13742014-11-17 14:32:17 +00002680 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002681 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002682 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002683 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002684 break;
2685
2686 default:
2687 LOG(FATAL) << "Unexpected type conversion from " << input_type
2688 << " to " << result_type;
2689 };
2690 break;
2691
Roland Levillaindff1f282014-11-05 14:15:05 +00002692 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002693 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002694 case Primitive::kPrimBoolean:
2695 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002696 case Primitive::kPrimByte:
2697 case Primitive::kPrimShort:
2698 case Primitive::kPrimInt:
2699 case Primitive::kPrimChar:
2700 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002701 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002702 locations->SetOut(Location::RequiresFpuRegister());
2703 break;
2704
2705 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002706 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002707 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002708 locations->SetOut(Location::RequiresFpuRegister());
2709 break;
2710
Roland Levillaincff13742014-11-17 14:32:17 +00002711 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002712 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002713 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002714 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002715 break;
2716
2717 default:
2718 LOG(FATAL) << "Unexpected type conversion from " << input_type
2719 << " to " << result_type;
2720 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002721 break;
2722
2723 default:
2724 LOG(FATAL) << "Unexpected type conversion from " << input_type
2725 << " to " << result_type;
2726 }
2727}
2728
2729void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2730 LocationSummary* locations = conversion->GetLocations();
2731 Location out = locations->Out();
2732 Location in = locations->InAt(0);
2733 Primitive::Type result_type = conversion->GetResultType();
2734 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002735 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002736 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002737 case Primitive::kPrimByte:
2738 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002739 case Primitive::kPrimLong:
2740 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002741 case Primitive::kPrimBoolean:
2742 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002743 case Primitive::kPrimShort:
2744 case Primitive::kPrimInt:
2745 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002746 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002747 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002748 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002749 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002750 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002751 Address(CpuRegister(RSP), in.GetStackIndex()));
2752 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002753 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002754 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002755 }
2756 break;
2757
2758 default:
2759 LOG(FATAL) << "Unexpected type conversion from " << input_type
2760 << " to " << result_type;
2761 }
2762 break;
2763
Roland Levillain01a8d712014-11-14 16:27:39 +00002764 case Primitive::kPrimShort:
2765 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002766 case Primitive::kPrimLong:
2767 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002768 case Primitive::kPrimBoolean:
2769 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002770 case Primitive::kPrimByte:
2771 case Primitive::kPrimInt:
2772 case Primitive::kPrimChar:
2773 // Processing a Dex `int-to-short' instruction.
2774 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002775 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002776 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002777 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002778 Address(CpuRegister(RSP), in.GetStackIndex()));
2779 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002780 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002781 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002782 }
2783 break;
2784
2785 default:
2786 LOG(FATAL) << "Unexpected type conversion from " << input_type
2787 << " to " << result_type;
2788 }
2789 break;
2790
Roland Levillain946e1432014-11-11 17:35:19 +00002791 case Primitive::kPrimInt:
2792 switch (input_type) {
2793 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002794 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002795 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002796 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002797 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002798 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002799 Address(CpuRegister(RSP), in.GetStackIndex()));
2800 } else {
2801 DCHECK(in.IsConstant());
2802 DCHECK(in.GetConstant()->IsLongConstant());
2803 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002804 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002805 }
2806 break;
2807
Roland Levillain3f8f9362014-12-02 17:45:01 +00002808 case Primitive::kPrimFloat: {
2809 // Processing a Dex `float-to-int' instruction.
2810 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2811 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002812 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002813
2814 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002815 // if input >= (float)INT_MAX goto done
2816 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002817 __ j(kAboveEqual, &done);
2818 // if input == NaN goto nan
2819 __ j(kUnordered, &nan);
2820 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002821 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002822 __ jmp(&done);
2823 __ Bind(&nan);
2824 // output = 0
2825 __ xorl(output, output);
2826 __ Bind(&done);
2827 break;
2828 }
2829
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002830 case Primitive::kPrimDouble: {
2831 // Processing a Dex `double-to-int' instruction.
2832 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2833 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002834 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002835
2836 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002837 // if input >= (double)INT_MAX goto done
2838 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002839 __ j(kAboveEqual, &done);
2840 // if input == NaN goto nan
2841 __ j(kUnordered, &nan);
2842 // output = double-to-int-truncate(input)
2843 __ cvttsd2si(output, input);
2844 __ jmp(&done);
2845 __ Bind(&nan);
2846 // output = 0
2847 __ xorl(output, output);
2848 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002849 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002850 }
Roland Levillain946e1432014-11-11 17:35:19 +00002851
2852 default:
2853 LOG(FATAL) << "Unexpected type conversion from " << input_type
2854 << " to " << result_type;
2855 }
2856 break;
2857
Roland Levillaindff1f282014-11-05 14:15:05 +00002858 case Primitive::kPrimLong:
2859 switch (input_type) {
2860 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002861 case Primitive::kPrimBoolean:
2862 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002863 case Primitive::kPrimByte:
2864 case Primitive::kPrimShort:
2865 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002866 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002867 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002868 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002869 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002870 break;
2871
Roland Levillain624279f2014-12-04 11:54:28 +00002872 case Primitive::kPrimFloat: {
2873 // Processing a Dex `float-to-long' instruction.
2874 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2875 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002876 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002877
Mark Mendell92e83bf2015-05-07 11:25:03 -04002878 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002879 // if input >= (float)LONG_MAX goto done
2880 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002881 __ j(kAboveEqual, &done);
2882 // if input == NaN goto nan
2883 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002884 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002885 __ cvttss2si(output, input, true);
2886 __ jmp(&done);
2887 __ Bind(&nan);
2888 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002889 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002890 __ Bind(&done);
2891 break;
2892 }
2893
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002894 case Primitive::kPrimDouble: {
2895 // Processing a Dex `double-to-long' instruction.
2896 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2897 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002898 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002899
Mark Mendell92e83bf2015-05-07 11:25:03 -04002900 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002901 // if input >= (double)LONG_MAX goto done
2902 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002903 __ j(kAboveEqual, &done);
2904 // if input == NaN goto nan
2905 __ j(kUnordered, &nan);
2906 // output = double-to-long-truncate(input)
2907 __ cvttsd2si(output, input, true);
2908 __ jmp(&done);
2909 __ Bind(&nan);
2910 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002911 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002912 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002913 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002914 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002915
2916 default:
2917 LOG(FATAL) << "Unexpected type conversion from " << input_type
2918 << " to " << result_type;
2919 }
2920 break;
2921
Roland Levillain981e4542014-11-14 11:47:14 +00002922 case Primitive::kPrimChar:
2923 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002924 case Primitive::kPrimLong:
2925 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002926 case Primitive::kPrimBoolean:
2927 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002928 case Primitive::kPrimByte:
2929 case Primitive::kPrimShort:
2930 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002931 // Processing a Dex `int-to-char' instruction.
2932 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002933 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002934 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002935 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002936 Address(CpuRegister(RSP), in.GetStackIndex()));
2937 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002938 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002939 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002940 }
2941 break;
2942
2943 default:
2944 LOG(FATAL) << "Unexpected type conversion from " << input_type
2945 << " to " << result_type;
2946 }
2947 break;
2948
Roland Levillaindff1f282014-11-05 14:15:05 +00002949 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002950 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002951 case Primitive::kPrimBoolean:
2952 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002953 case Primitive::kPrimByte:
2954 case Primitive::kPrimShort:
2955 case Primitive::kPrimInt:
2956 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002957 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002958 if (in.IsRegister()) {
2959 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2960 } else if (in.IsConstant()) {
2961 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2962 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002963 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002964 } else {
2965 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2966 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2967 }
Roland Levillaincff13742014-11-17 14:32:17 +00002968 break;
2969
2970 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002971 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002972 if (in.IsRegister()) {
2973 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2974 } else if (in.IsConstant()) {
2975 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2976 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002977 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002978 } else {
2979 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2980 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2981 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002982 break;
2983
Roland Levillaincff13742014-11-17 14:32:17 +00002984 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002985 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002986 if (in.IsFpuRegister()) {
2987 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2988 } else if (in.IsConstant()) {
2989 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2990 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002991 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002992 } else {
2993 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2994 Address(CpuRegister(RSP), in.GetStackIndex()));
2995 }
Roland Levillaincff13742014-11-17 14:32:17 +00002996 break;
2997
2998 default:
2999 LOG(FATAL) << "Unexpected type conversion from " << input_type
3000 << " to " << result_type;
3001 };
3002 break;
3003
Roland Levillaindff1f282014-11-05 14:15:05 +00003004 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00003005 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00003006 case Primitive::kPrimBoolean:
3007 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00003008 case Primitive::kPrimByte:
3009 case Primitive::kPrimShort:
3010 case Primitive::kPrimInt:
3011 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00003012 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003013 if (in.IsRegister()) {
3014 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3015 } else if (in.IsConstant()) {
3016 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3017 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003018 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003019 } else {
3020 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3021 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3022 }
Roland Levillaincff13742014-11-17 14:32:17 +00003023 break;
3024
3025 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00003026 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003027 if (in.IsRegister()) {
3028 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3029 } else if (in.IsConstant()) {
3030 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3031 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003032 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003033 } else {
3034 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3035 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3036 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003037 break;
3038
Roland Levillaincff13742014-11-17 14:32:17 +00003039 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003040 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003041 if (in.IsFpuRegister()) {
3042 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3043 } else if (in.IsConstant()) {
3044 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3045 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003046 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003047 } else {
3048 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3049 Address(CpuRegister(RSP), in.GetStackIndex()));
3050 }
Roland Levillaincff13742014-11-17 14:32:17 +00003051 break;
3052
3053 default:
3054 LOG(FATAL) << "Unexpected type conversion from " << input_type
3055 << " to " << result_type;
3056 };
Roland Levillaindff1f282014-11-05 14:15:05 +00003057 break;
3058
3059 default:
3060 LOG(FATAL) << "Unexpected type conversion from " << input_type
3061 << " to " << result_type;
3062 }
3063}
3064
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003065void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003066 LocationSummary* locations =
3067 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003068 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003069 case Primitive::kPrimInt: {
3070 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003071 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3072 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003073 break;
3074 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003075
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003076 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003077 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003078 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003079 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003080 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003081 break;
3082 }
3083
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003084 case Primitive::kPrimDouble:
3085 case Primitive::kPrimFloat: {
3086 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003087 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003088 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003089 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003090 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003091
3092 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003093 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003094 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003095}
3096
// Emits x86-64 code for an HAdd. Integer adds exploit commutativity and the
// three-operand leal/leaq form when the output register differs from both
// inputs; FP adds use addss/addsd against a register, a constant-area
// literal, or a stack slot (destination always aliases the first input).
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out aliases the first input: plain two-operand add.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // out aliases the second input: add is commutative, swap operands.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // out is a third register: leal computes first + second non-destructively.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // leal with a displacement: out = first + constant without clobbering first.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Memory operand: the two-operand form requires out == first.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // Commutative: add first into out (== second).
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        // The register allocator only gives us constants that fit in 32 bits
        // here (see RegisterOrInt32Constant in the locations builder).
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // SSE scalar add; out == first is guaranteed by the locations builder.
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand comes from the RIP-relative constant area.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3188
3189void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003190 LocationSummary* locations =
3191 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003192 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003193 case Primitive::kPrimInt: {
3194 locations->SetInAt(0, Location::RequiresRegister());
3195 locations->SetInAt(1, Location::Any());
3196 locations->SetOut(Location::SameAsFirstInput());
3197 break;
3198 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003199 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003200 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003201 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003202 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003203 break;
3204 }
Calin Juravle11351682014-10-23 15:38:15 +01003205 case Primitive::kPrimFloat:
3206 case Primitive::kPrimDouble: {
3207 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003208 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003209 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003210 break;
Calin Juravle11351682014-10-23 15:38:15 +01003211 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003212 default:
Calin Juravle11351682014-10-23 15:38:15 +01003213 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003214 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003215}
3216
// Emits x86-64 code for an HSub. All forms are two-operand and destructive:
// the locations builder guarantees the output aliases the first input.
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      // Second operand may be a register, an immediate, or a stack slot.
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        // The locations builder only allows int32-encodable long constants.
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand comes from the RIP-relative constant area.
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3279
Calin Juravle34bacdf2014-10-07 20:23:36 +01003280void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3281 LocationSummary* locations =
3282 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3283 switch (mul->GetResultType()) {
3284 case Primitive::kPrimInt: {
3285 locations->SetInAt(0, Location::RequiresRegister());
3286 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003287 if (mul->InputAt(1)->IsIntConstant()) {
3288 // Can use 3 operand multiply.
3289 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3290 } else {
3291 locations->SetOut(Location::SameAsFirstInput());
3292 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003293 break;
3294 }
3295 case Primitive::kPrimLong: {
3296 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003297 locations->SetInAt(1, Location::Any());
3298 if (mul->InputAt(1)->IsLongConstant() &&
3299 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003300 // Can use 3 operand multiply.
3301 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3302 } else {
3303 locations->SetOut(Location::SameAsFirstInput());
3304 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003305 break;
3306 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003307 case Primitive::kPrimFloat:
3308 case Primitive::kPrimDouble: {
3309 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003310 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003311 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003312 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003313 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003314
3315 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003316 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003317 }
3318}
3319
// Emits x86-64 code for an HMul. Integer multiplies by an immediate use the
// three-operand imul form; 64-bit constants that do not fit in 32 bits come
// from the constant area. FP multiplies are two-operand mulss/mulsd.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        // Three-operand imul: out = first * imm, out may be a fresh register.
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // imulq immediates are sign-extended 32-bit values.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // SSE scalar multiply; destination must alias the first input.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand comes from the RIP-relative constant area.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3403
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003404void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3405 uint32_t stack_adjustment, bool is_float) {
3406 if (source.IsStackSlot()) {
3407 DCHECK(is_float);
3408 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3409 } else if (source.IsDoubleStackSlot()) {
3410 DCHECK(!is_float);
3411 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3412 } else {
3413 // Write the value to the temporary location on the stack and load to FP stack.
3414 if (is_float) {
3415 Location stack_temp = Location::StackSlot(temp_offset);
3416 codegen_->Move(stack_temp, source);
3417 __ flds(Address(CpuRegister(RSP), temp_offset));
3418 } else {
3419 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3420 codegen_->Move(stack_temp, source);
3421 __ fldl(Address(CpuRegister(RSP), temp_offset));
3422 }
3423 }
3424}
3425
// Emits code for a floating-point remainder (HRem on float/double) using the
// x87 fprem instruction, since SSE has no remainder. The two operands are
// moved onto the x87 stack, fprem is iterated until the FPU reports the
// reduction complete, and the result is transferred back to an XMM register
// through a temporary stack slot. Clobbers RAX (FPU status word) and uses
// 2 * elem_size bytes of freshly reserved stack.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem computes ST(0) % ST(1), so the divisor must be pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  // fprem performs only partial reduction per iteration for large exponents.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3478
// Generates code for an HDiv or HRem whose right-hand side is the constant
// 1 or -1, avoiding the expensive idiv instruction:
//   x / 1 == x,  x / -1 == -x,  x % 1 == x % -1 == 0.
void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DCHECK(imm == 1 || imm == -1);

  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt: {
      if (instruction->IsRem()) {
        // Remainder by +/-1 is always 0.
        __ xorl(output_register, output_register);
      } else {
        __ movl(output_register, input_register);
        if (imm == -1) {
          // Division by -1 is negation.
          __ negl(output_register);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (instruction->IsRem()) {
        // A 32-bit xor zero-extends, clearing the whole 64-bit register
        // with a shorter encoding than xorq.
        __ xorl(output_register, output_register);
      } else {
        __ movq(output_register, input_register);
        if (imm == -1) {
          __ negq(output_register);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
  }
}
3521
// Generates code for an HDiv whose divisor is a power-of-two constant (or
// its negation). An arithmetic shift rounds toward negative infinity while
// the required semantics round toward zero, so |imm| - 1 is conditionally
// added (via cmov) to negative numerators before shifting.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // tmp = numerator + (abs_imm - 1): the biased value for negative inputs.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    __ testl(numerator, numerator);
    // Keep the unbiased numerator when it is non-negative.
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      // Negative divisor: negate the quotient.
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // The 64-bit bias may not fit in a 32-bit lea displacement, so
    // materialize it in the temp and add instead.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3565
// Generates div/rem by an arbitrary constant (not 0, +/-1, or, for div, a
// power of two) using multiplication by a precomputed "magic" number (see
// CalculateMagicAndShiftForDivRem): the quotient is derived from the high
// half of numerator * magic, corrected by add/sub and shifts, avoiding the
// slow idiv instruction. For rem, quotient * imm is then subtracted from
// the saved numerator.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // The numerator must be preserved across the imul, which clobbers RAX/RDX;
  // the temp used for it depends on which temp slot the locations builder set up.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator; EAX is about to be clobbered by the magic constant.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Sign-correction terms required by the magic-number algorithm.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // imulq only takes a 32-bit immediate; larger constants go through
      // a RIP-relative literal.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3676
// Generates code for an integral HDiv/HRem. Constant divisors are
// strength-reduced (0: nothing, since DivZeroCheck throws first; +/-1;
// power of two for div; magic-number multiply otherwise). Non-constant
// divisors use idiv with a slow path guarding the MIN_VALUE / -1 case,
// which would raise a hardware exception.
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // idiv takes its dividend in RDX:RAX and returns quotient in RAX,
  // remainder in RDX; the locations builder pinned the registers accordingly.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
      DivByPowerOfTwo(instruction->AsDiv());
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    SlowPathCode* slow_path =
        new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == Primitive::kPrimInt) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
3732
// Sets up register constraints for HDiv. Integral division pins the
// dividend to RAX (idiv requirement) and reserves RDX; FP division uses
// SSE registers with a flexible second operand.
void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      // Intel uses edx:eax as the dividend.
      locations->AddTemp(Location::RegisterLocation(RDX));
      // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
      // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
      // output and request another temp.
      if (div->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      // The divisor may be in a register, a stack slot, or a constant literal.
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3765
// Emits code for HDiv: integral types go through GenerateDivRemIntegral;
// float/double use divss/divsd with a register, literal-pool, or stack
// second operand as allocated.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // Output was constrained to the first input (in-place operation).
  DCHECK(first.Equals(locations->Out()));

  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(div);
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor: divide by a RIP-relative literal.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3814
// Sets up register constraints for HRem. Integral remainder pins the
// dividend to RAX and the output to RDX (idiv convention); FP remainder is
// computed with x87 fprem (see GenerateRemFP) and needs RAX as a temp for
// the FPU status word.
void LocationsBuilderX86_64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      // Intel uses rdx:rax as the dividend and puts the remainder in rdx
      locations->SetOut(Location::RegisterLocation(RDX));
      // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
      // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
      // output and request another temp.
      if (rem->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::Any());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresFpuRegister());
      locations->AddTemp(Location::RegisterLocation(RAX));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
3849
3850void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3851 Primitive::Type type = rem->GetResultType();
3852 switch (type) {
3853 case Primitive::kPrimInt:
3854 case Primitive::kPrimLong: {
3855 GenerateDivRemIntegral(rem);
3856 break;
3857 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003858 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003859 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003860 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003861 break;
3862 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003863 default:
3864 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3865 }
3866}
3867
// Sets up locations for HDivZeroCheck: the value may live anywhere
// (register, stack slot, or constant); the check produces no output and
// only needs the shared throwing-slow-path location summary.
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
3872
// Emits the divide-by-zero check: compares the divisor against zero and
// jumps to a throwing slow path when it is zero. Sub-int types are handled
// with the 32-bit compare. A constant zero divisor becomes an unconditional
// jump; a non-zero constant emits nothing.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        // test sets ZF when the register is zero without needing an immediate.
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Known-zero divisor: always throw.
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
3920
// Shared locations setup for HShl/HShr/HUShr: the value is operated on in
// place, and a variable shift count must be in CL (x86 requirement).
void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL.
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
  }
}
3940
// Shared code generation for HShl/HShr/HUShr. The shift happens in place on
// the first input's register. Constant shift amounts are masked to the
// valid distance (31 for int, 63 for long) before being encoded as an
// immediate; variable amounts are in CL, which the hardware masks itself.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          // Shr is the arithmetic (sign-propagating) shift.
          __ sarl(first_reg, second_reg);
        } else {
          // UShr is the logical (zero-filling) shift.
          __ shrl(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
3998
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003999void LocationsBuilderX86_64::VisitRor(HRor* ror) {
4000 LocationSummary* locations =
4001 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
4002
4003 switch (ror->GetResultType()) {
4004 case Primitive::kPrimInt:
4005 case Primitive::kPrimLong: {
4006 locations->SetInAt(0, Location::RequiresRegister());
4007 // The shift count needs to be in CL (unless it is a constant).
4008 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
4009 locations->SetOut(Location::SameAsFirstInput());
4010 break;
4011 }
4012 default:
4013 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4014 UNREACHABLE();
4015 }
4016}
4017
// Emits code for HRor (rotate right) in place on the first input's
// register. Constant amounts are masked to the valid distance before being
// encoded; variable amounts come in CL.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case Primitive::kPrimLong:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4047
// Shl, Shr, and UShr share both their location constraints and their code
// generation; each visitor simply forwards to HandleShift.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4071
// Sets up locations for HNewInstance, which calls into the runtime: the
// result comes back in RAX. String allocation goes through StringFactory
// and only needs the method register as a temp; other allocations pass the
// class in the first runtime-call argument register.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  locations->SetOut(Location::RegisterLocation(RAX));
}
4083
// Emits code for HNewInstance by calling into the runtime.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    // The entry point address is loaded thread-locally via the GS segment.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    // Record the call site for stack maps / exception delivery.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4100
// Sets up locations for HNewArray (runtime call): class and length go in
// the first two runtime-call argument registers, result comes back in RAX.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
4109
// Emits code for HNewArray by calling the allocation entry point selected
// for the array's resolved class.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  QuickEntrypointEnum entrypoint =
      CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
4119
// Sets up the output location of an HParameterValue. Parameters passed on
// the stack live in the caller's frame, so their slot index is rebased by
// this method's frame size to address them from the current RSP.
void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
4131
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

// HCurrentMethod materializes in the dedicated method register, where the
// calling convention placed the ArtMethod* on entry.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4147
// HClassTableGet reads a method pointer out of a class's vtable or IMT:
// the class reference comes in a register and the result goes to another.
void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
4154
// Emits the load of a method pointer from a class. A vtable entry is
// embedded in the class object and needs one load; an IMT entry requires
// first loading the IMT pointer from the class, then indexing into it.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // Load the IMT pointer, then the entry; the output doubles as the
    // intermediate register.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4172
// HNot (bitwise complement) operates in place: input in a register,
// output constrained to the same register.
void LocationsBuilderX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4179
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004180void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4181 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004182 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4183 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004184 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004185 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004186 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004187 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004188 break;
4189
4190 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004191 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004192 break;
4193
4194 default:
4195 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4196 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004197}
4198
David Brazdil66d126e2015-04-03 16:02:44 +01004199void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4200 LocationSummary* locations =
4201 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4202 locations->SetInAt(0, Location::RequiresRegister());
4203 locations->SetOut(Location::SameAsFirstInput());
4204}
4205
4206void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004207 LocationSummary* locations = bool_not->GetLocations();
4208 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4209 locations->Out().AsRegister<CpuRegister>().AsRegister());
4210 Location out = locations->Out();
4211 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4212}
4213
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004214void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004215 LocationSummary* locations =
4216 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004217 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004218 locations->SetInAt(i, Location::Any());
4219 }
4220 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004221}
4222
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004223void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004224 LOG(FATAL) << "Unimplemented";
4225}
4226
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004227void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004228 /*
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004229 * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004230 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004231 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4232 */
4233 switch (kind) {
4234 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004235 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004236 break;
4237 }
4238 case MemBarrierKind::kAnyStore:
4239 case MemBarrierKind::kLoadAny:
4240 case MemBarrierKind::kStoreStore: {
4241 // nop
4242 break;
4243 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004244 case MemBarrierKind::kNTStoreStore:
4245 // Non-Temporal Store/Store needs an explicit fence.
4246 MemoryFence(/* non-temporal */ true);
4247 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004248 }
4249}
4250
// Builds the location summary shared by instance and static field gets.
// Object-reference loads under a (non-Baker) read barrier may branch to a
// slow path, hence kCallOnSlowPath for those.
void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the move to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
4276
// Emits the load for an instance or static field get described by
// `field_info` into the output location. Reference loads may go through a
// read barrier (Baker fast path, or a slow path otherwise); volatile loads
// are followed by a LoadAny barrier. The implicit null check is recorded on
// the instruction that actually dereferences `base`.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      // Zero-extending 8-bit load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extending 8-bit load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extending 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extending 16-bit load (Java chars are unsigned).
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimFloat: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4375
// Builds the location summary shared by instance and static field sets.
// Volatile stores constrain constants to fit a single-instruction store;
// reference stores may need temps for the write barrier and/or poisoning.
void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_volatile = field_info.IsVolatile();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));

  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
    }
  } else {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    }
  }
  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
    // Temporary register for the reference poisoning.
    locations->AddTemp(Location::RequiresRegister());
  }
}
4412
Calin Juravle52c48962014-12-16 17:02:57 +00004413void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004414 const FieldInfo& field_info,
4415 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004416 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4417
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004418 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004419 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4420 Location value = locations->InAt(1);
4421 bool is_volatile = field_info.IsVolatile();
4422 Primitive::Type field_type = field_info.GetFieldType();
4423 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4424
4425 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004426 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004427 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004428
Mark Mendellea5af682015-10-22 17:35:49 -04004429 bool maybe_record_implicit_null_check_done = false;
4430
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004431 switch (field_type) {
4432 case Primitive::kPrimBoolean:
4433 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004434 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004435 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004436 __ movb(Address(base, offset), Immediate(v));
4437 } else {
4438 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4439 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004440 break;
4441 }
4442
4443 case Primitive::kPrimShort:
4444 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004445 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004446 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004447 __ movw(Address(base, offset), Immediate(v));
4448 } else {
4449 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4450 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004451 break;
4452 }
4453
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004454 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004455 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004456 if (value.IsConstant()) {
4457 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004458 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4459 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4460 // Note: if heap poisoning is enabled, no need to poison
4461 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004462 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004463 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004464 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4465 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4466 __ movl(temp, value.AsRegister<CpuRegister>());
4467 __ PoisonHeapReference(temp);
4468 __ movl(Address(base, offset), temp);
4469 } else {
4470 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4471 }
Mark Mendell40741f32015-04-20 22:10:34 -04004472 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004473 break;
4474 }
4475
4476 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004477 if (value.IsConstant()) {
4478 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004479 codegen_->MoveInt64ToAddress(Address(base, offset),
4480 Address(base, offset + sizeof(int32_t)),
4481 v,
4482 instruction);
4483 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004484 } else {
4485 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4486 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004487 break;
4488 }
4489
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004490 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004491 if (value.IsConstant()) {
4492 int32_t v =
4493 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4494 __ movl(Address(base, offset), Immediate(v));
4495 } else {
4496 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4497 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004498 break;
4499 }
4500
4501 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004502 if (value.IsConstant()) {
4503 int64_t v =
4504 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4505 codegen_->MoveInt64ToAddress(Address(base, offset),
4506 Address(base, offset + sizeof(int32_t)),
4507 v,
4508 instruction);
4509 maybe_record_implicit_null_check_done = true;
4510 } else {
4511 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4512 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004513 break;
4514 }
4515
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004516 case Primitive::kPrimVoid:
4517 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004518 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004519 }
Calin Juravle52c48962014-12-16 17:02:57 +00004520
Mark Mendellea5af682015-10-22 17:35:49 -04004521 if (!maybe_record_implicit_null_check_done) {
4522 codegen_->MaybeRecordImplicitNullCheck(instruction);
4523 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004524
4525 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4526 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4527 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004528 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004529 }
4530
Calin Juravle52c48962014-12-16 17:02:57 +00004531 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004532 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004533 }
4534}
4535
// Instance field sets share their location logic with static sets.
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4539
// Code generation for instance field sets is shared with static sets.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4543
// Instance field gets share their location logic with static gets.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4547
// Code generation for instance field gets is shared with static gets.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004551
Calin Juravle52c48962014-12-16 17:02:57 +00004552void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4553 HandleFieldGet(instruction);
4554}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004555
Calin Juravle52c48962014-12-16 17:02:57 +00004556void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4557 HandleFieldGet(instruction, instruction->GetFieldInfo());
4558}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004559
Calin Juravle52c48962014-12-16 17:02:57 +00004560void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4561 HandleFieldSet(instruction, instruction->GetFieldInfo());
4562}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004563
Calin Juravle52c48962014-12-16 17:02:57 +00004564void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004565 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004566}
4567
Calin Juravlee460d1d2015-09-29 04:52:17 +01004568void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4569 HUnresolvedInstanceFieldGet* instruction) {
4570 FieldAccessCallingConventionX86_64 calling_convention;
4571 codegen_->CreateUnresolvedFieldLocationSummary(
4572 instruction, instruction->GetFieldType(), calling_convention);
4573}
4574
// Delegates the unresolved instance field get to the shared codegen helper,
// which handles all unresolved field kinds uniformly.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4584
// Field unresolved at compile time: locations are fixed by the field-access
// calling convention, mirroring the unresolved-get case above it in spirit.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4591
// Delegates the unresolved instance field set to the shared codegen helper.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4601
// Unresolved static field get: locations fixed by the field-access calling
// convention, as for the unresolved instance cases.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4608
// Delegates the unresolved static field get to the shared codegen helper.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4618
// Unresolved static field set: locations fixed by the field-access calling
// convention, as for the unresolved instance cases.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4625
// Delegates the unresolved static field set to the shared codegen helper.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4634}
4635
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004636void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004637 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4638 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4639 ? Location::RequiresRegister()
4640 : Location::Any();
4641 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004642}
4643
Calin Juravle2ae48182016-03-16 14:05:09 +00004644void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4645 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004646 return;
4647 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004648 LocationSummary* locations = instruction->GetLocations();
4649 Location obj = locations->InAt(0);
4650
4651 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004652 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004653}
4654
Calin Juravle2ae48182016-03-16 14:05:09 +00004655void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004656 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004657 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004658
4659 LocationSummary* locations = instruction->GetLocations();
4660 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004661
4662 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004663 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004664 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004665 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004666 } else {
4667 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004668 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004669 __ jmp(slow_path->GetEntryLabel());
4670 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004671 }
4672 __ j(kEqual, slow_path->GetEntryLabel());
4673}
4674
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004675void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004676 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004677}
4678
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004679void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004680 bool object_array_get_with_read_barrier =
4681 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004682 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004683 new (GetGraph()->GetArena()) LocationSummary(instruction,
4684 object_array_get_with_read_barrier ?
4685 LocationSummary::kCallOnSlowPath :
4686 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004687 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004688 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004689 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004690 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004691 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004692 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4693 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4694 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004695 // The output overlaps for an object array get when read barriers
4696 // are enabled: we do not want the move to overwrite the array's
4697 // location, as we need it to emit the read barrier.
4698 locations->SetOut(
4699 Location::RequiresRegister(),
4700 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004701 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004702}
4703
4704void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4705 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004706 Location obj_loc = locations->InAt(0);
4707 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004708 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004709 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004710 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004711
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004712 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004713 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004714 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004715 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004716 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004717 break;
4718 }
4719
4720 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004721 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004722 __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004723 break;
4724 }
4725
4726 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004727 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004728 __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004729 break;
4730 }
4731
4732 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004733 CpuRegister out = out_loc.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07004734 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
4735 // Branch cases into compressed and uncompressed for each index's type.
4736 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
4737 NearLabel done, not_compressed;
Vladimir Marko3c89d422017-02-17 11:30:23 +00004738 __ testb(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07004739 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004740 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
4741 "Expecting 0=compressed, 1=uncompressed");
4742 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07004743 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
4744 __ jmp(&done);
4745 __ Bind(&not_compressed);
4746 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4747 __ Bind(&done);
4748 } else {
4749 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4750 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004751 break;
4752 }
4753
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004754 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004755 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004756 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004757 break;
4758 }
4759
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004760 case Primitive::kPrimNot: {
4761 static_assert(
4762 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4763 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004764 // /* HeapReference<Object> */ out =
4765 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4766 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004767 // Note that a potential implicit null check is handled in this
Roland Levillaina1aa3b12016-10-26 13:03:38 +01004768 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004769 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004770 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004771 } else {
4772 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004773 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
4774 codegen_->MaybeRecordImplicitNullCheck(instruction);
4775 // If read barriers are enabled, emit read barriers other than
4776 // Baker's using a slow path (and also unpoison the loaded
4777 // reference, if heap poisoning is enabled).
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004778 if (index.IsConstant()) {
4779 uint32_t offset =
4780 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004781 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4782 } else {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004783 codegen_->MaybeGenerateReadBarrierSlow(
4784 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4785 }
4786 }
4787 break;
4788 }
4789
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004790 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004791 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004792 __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004793 break;
4794 }
4795
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004796 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004797 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004798 __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004799 break;
4800 }
4801
4802 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004803 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004804 __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004805 break;
4806 }
4807
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004808 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004809 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004810 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004811 }
Roland Levillain4d027112015-07-01 15:41:14 +01004812
4813 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004814 // Potential implicit null checks, in the case of reference
4815 // arrays, are handled in the previous switch statement.
4816 } else {
4817 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004818 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004819}
4820
4821void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004822 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004823
4824 bool needs_write_barrier =
4825 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004826 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004827
Nicolas Geoffray39468442014-09-02 15:17:15 +01004828 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004829 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004830 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004831 LocationSummary::kCallOnSlowPath :
4832 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004833
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004834 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004835 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4836 if (Primitive::IsFloatingPointType(value_type)) {
4837 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004838 } else {
4839 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4840 }
4841
4842 if (needs_write_barrier) {
4843 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004844 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004845 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004846 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004847}
4848
// Emits the code for an array element store.  For primitive element types this
// is a single store (with an implicit null check recorded on the instruction
// that touches the array).  For reference elements it additionally emits the
// type check (possibly via a slow path), reference poisoning if enabled, and
// the card-marking write barrier.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      // The store above is the instruction that faults on a null array.
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimNot: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null: no type check, no poisoning, no write barrier.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      // We cannot use a NearLabel for `done`, as its range may be too
      // short when Baker read barriers are enabled.
      Label done;
      NearLabel not_null, do_put;
      SlowPathCode* slow_path = nullptr;
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null always succeeds; skip the type check entirely.
          __ testl(register_value, register_value);
          __ j(kNotEqual, &not_null);
          __ movl(address, Immediate(0));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ jmp(&done);
          __ Bind(&not_null);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Exact match succeeds immediately; otherwise, a value whose class
          // directly extends Object still fits in an Object[] array.
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      if (kPoisonHeapReferences) {
        // Poison a copy of the reference; `register_value` must stay intact
        // for the write barrier below.
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        __ movl(address, temp);
      } else {
        __ movl(address, register_value);
      }
      if (!may_need_runtime_call_for_type_check) {
        // In the checked path the null check was recorded on the class load
        // above; here, the store itself is the faulting instruction.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
      __ Bind(&done);

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case Primitive::kPrimInt: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // A 64-bit immediate may not fit in one store; MoveInt64ToAddress
        // handles the split (and records the null check itself).
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the raw bit pattern of the float constant.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the raw bit pattern of the double constant, possibly split
        // into two 32-bit stores by MoveInt64ToAddress.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5051
5052void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005053 LocationSummary* locations =
5054 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005055 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005056 if (!instruction->IsEmittedAtUseSite()) {
5057 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5058 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005059}
5060
5061void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005062 if (instruction->IsEmittedAtUseSite()) {
5063 return;
5064 }
5065
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005066 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005067 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005068 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5069 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005070 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005071 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005072 // Mask out most significant bit in case the array is String's array of char.
5073 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005074 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005075 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005076}
5077
5078void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005079 RegisterSet caller_saves = RegisterSet::Empty();
5080 InvokeRuntimeCallingConvention calling_convention;
5081 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5082 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5083 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005084 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005085 HInstruction* length = instruction->InputAt(1);
5086 if (!length->IsEmittedAtUseSite()) {
5087 locations->SetInAt(1, Location::RegisterOrConstant(length));
5088 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005089}
5090
// Emits the array bounds check: jumps to a throwing slow path when
// index >= length (the unsigned comparison also catches negative indices).
// Handles constant lengths/indices, lengths loaded at the use site (including
// compressed strings), and lengths held in a register.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically out of bounds: always throw.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    // Unsigned "above or equal" also traps negative indices.
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        // The count field holds the compression flag in its low bit; load it
        // into TMP and shift the flag out before comparing.
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        // The in-memory compare above is the faulting instruction for a
        // null array.
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // length <= index (unsigned) means out of bounds.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5151
// Emits the card-marking write barrier for a reference store into `object`.
// If `value_can_be_null`, null stores skip the barrier entirely.  The card
// table base is read from a thread-local slot via the GS segment, and the
// card covering `object` is dirtied with a single byte store.
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    // Storing null never creates an old-to-young pointer; skip the barrier.
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the thread-local card table base (GS-relative, no RIP addressing).
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip */ true));
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Dirty the card at base + (object >> kCardShift) by storing the low byte
  // of `card`.  NOTE(review): this presumably relies on the card table base
  // being biased so that its low byte equals the dirty-card value — confirm
  // against gc::accounting::CardTable.
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5171
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005172void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005173 LOG(FATAL) << "Unimplemented";
5174}
5175
// Delegates the actual emission of the moves to the parallel move resolver.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
5179
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005180void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005181 LocationSummary* locations =
5182 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005183 // In suspend check slow path, usually there are no caller-save registers at all.
5184 // If SIMD instructions are present, however, we force spilling all live SIMD
5185 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005186 locations->SetCustomSlowPathCallerSaves(
5187 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005188}
5189
5190void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005191 HBasicBlock* block = instruction->GetBlock();
5192 if (block->GetLoopInformation() != nullptr) {
5193 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5194 // The back edge will generate the suspend check.
5195 return;
5196 }
5197 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5198 // The goto will generate the suspend check.
5199 return;
5200 }
5201 GenerateSuspendCheck(instruction, nullptr);
5202}
5203
// Emits a suspend check: polls the thread-flags word in thread-local storage
// and branches to a (cached, per-instruction) slow path when any flag is set.
// With a `successor` block (loop back edge), the fast path jumps straight to
// the successor; otherwise execution falls through past the slow path's
// return label.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse the slow path if this instruction already created one (a suspend
  // check can be reached more than once during code generation).
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Compare the 16-bit thread-flags field (GS-relative) against zero.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Fall-through fast path: only divert to the slow path when a flag is set.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge form: fast path jumps to the successor, slow path otherwise.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5231
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005232X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5233 return codegen_->GetAssembler();
5234}
5235
5236void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005237 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005238 Location source = move->GetSource();
5239 Location destination = move->GetDestination();
5240
5241 if (source.IsRegister()) {
5242 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005243 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005244 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005245 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005246 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005247 } else {
5248 DCHECK(destination.IsDoubleStackSlot());
5249 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005250 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005251 }
5252 } else if (source.IsStackSlot()) {
5253 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005254 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005255 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005256 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005257 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005258 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005259 } else {
5260 DCHECK(destination.IsStackSlot());
5261 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5262 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5263 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005264 } else if (source.IsDoubleStackSlot()) {
5265 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005266 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005267 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005268 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005269 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5270 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005271 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005272 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005273 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5274 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5275 }
Aart Bik5576f372017-03-23 16:17:37 -07005276 } else if (source.IsSIMDStackSlot()) {
5277 DCHECK(destination.IsFpuRegister());
5278 __ movups(destination.AsFpuRegister<XmmRegister>(),
5279 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005280 } else if (source.IsConstant()) {
5281 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005282 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5283 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005284 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005285 if (value == 0) {
5286 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5287 } else {
5288 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5289 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005290 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005291 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005292 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005293 }
5294 } else if (constant->IsLongConstant()) {
5295 int64_t value = constant->AsLongConstant()->GetValue();
5296 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005297 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005298 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005299 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005300 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005301 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005302 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005303 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005304 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005305 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005306 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005307 } else {
5308 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005309 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005310 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5311 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005312 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005313 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005314 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005315 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005316 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005317 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005318 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005319 } else {
5320 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005321 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005322 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005323 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005324 } else if (source.IsFpuRegister()) {
5325 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005326 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005327 } else if (destination.IsStackSlot()) {
5328 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005329 source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005330 } else if (destination.IsDoubleStackSlot()) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005331 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005332 source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005333 } else {
5334 DCHECK(destination.IsSIMDStackSlot());
5335 __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
5336 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005337 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005338 }
5339}
5340
// Swaps a 32-bit core register with a 32-bit stack slot at RSP-relative
// offset `mem`, using TMP as the scratch register.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // Save stack slot value.
  __ movl(Address(CpuRegister(RSP), mem), reg);               // Slot <- register.
  __ movl(reg, CpuRegister(TMP));                             // Register <- saved value.
}
5346
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005347void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005348 ScratchRegisterScope ensure_scratch(
5349 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5350
5351 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5352 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5353 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5354 Address(CpuRegister(RSP), mem2 + stack_offset));
5355 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5356 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5357 CpuRegister(ensure_scratch.GetRegister()));
5358}
5359
// Swaps two 64-bit core registers through the TMP scratch register.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);  // Save reg1.
  __ movq(reg1, reg2);              // reg1 <- reg2.
  __ movq(reg2, CpuRegister(TMP));  // reg2 <- saved reg1.
}
5365
// Swaps a 64-bit core register with a 64-bit stack slot at RSP-relative
// offset `mem`, using TMP as the scratch register.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // Save stack slot value.
  __ movq(Address(CpuRegister(RSP), mem), reg);               // Slot <- register.
  __ movq(reg, CpuRegister(TMP));                             // Register <- saved value.
}
5371
5372void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5373 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005374 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005375
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005376 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5377 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5378 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5379 Address(CpuRegister(RSP), mem2 + stack_offset));
5380 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5381 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5382 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005383}
5384
// Swaps a 32-bit (single-precision) FP register with a 32-bit stack slot,
// using the core register TMP as scratch.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // Save stack slot value.
  __ movss(Address(CpuRegister(RSP), mem), reg);              // Slot <- low 32 bits of XMM.
  __ movd(reg, CpuRegister(TMP));                             // XMM <- saved value (GPR->XMM).
}
5390
// Swaps a 64-bit (double-precision) FP register with a 64-bit stack slot,
// using the core register TMP as scratch.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // Save stack slot value.
  __ movsd(Address(CpuRegister(RSP), mem), reg);              // Slot <- low 64 bits of XMM.
  // Note: movd with a (64-bit) CpuRegister operand transfers the full 64 bits.
  __ movd(reg, CpuRegister(TMP));
}
5396
// Emits code to swap the source and destination of the parallel move at
// `index`, dispatching on the (location kind, location kind) pair to the
// matching Exchange32/Exchange64 helper or an inline XMM<->XMM sequence.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Core registers are always swapped with 64-bit moves.
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM swap: stash source's 64 bits in TMP, copy dest into source,
    // then restore the stashed bits into dest.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    // Any other pairing (e.g. involving SIMD stack slots) is not supported here.
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5432
5433
// Spills a core scratch register by pushing it onto the stack.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5437
5438
// Restores a previously spilled core scratch register by popping it back.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5442
// Emits a class-initialization check on the class held in `class_reg`:
// compares the class status against kStatusInitialized and jumps to
// `slow_path` when the class is not yet initialized, binding the slow
// path's exit label after the check.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5451
// Returns the HLoadClass load kind this code generator will actually use for
// the desired kind. On x86-64 every valid kind is supported as-is; the switch
// only asserts that the kind is consistent with the compilation mode
// (JIT vs. AOT).
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBssEntry:
      // PC-relative kinds are only used when compiling ahead of time.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_class_load_kind;
}
5473
// Allocates register locations for an HLoadClass instruction, depending on
// its load kind and on whether a read barrier / slow path may be needed.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // The referrer's class is loaded from the current ArtMethod in input 0.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      // Custom calling convention: RAX serves as both input and output.
      RegisterSet caller_saves = RegisterSet::Empty();
      caller_saves.Add(Location::RegisterLocation(RAX));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
5511
// Records a JIT GC root for the given class handle and creates a patch entry
// for the instruction that will load it; returns the label to be fixed up.
Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
                                                 dex::TypeIndex dex_index,
                                                 Handle<mirror::Class> handle) {
  jit_class_roots_.Overwrite(
      TypeReference(&dex_file, dex_index), reinterpret_cast64<uint64_t>(handle.GetReference()));
  // Add a patch entry and return the label.
  jit_class_patches_.emplace_back(dex_file, dex_index.index_);
  PatchInfo<Label>* info = &jit_class_patches_.back();
  return &info->label;
}
5522
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
//
// Emits code to load a class reference into the output register, dispatching
// on the load kind chosen at locations-building time. A slow path is attached
// when a BSS entry may be null (unresolved class) and/or when a clinit check
// is required.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    // Fully delegated to a runtime call.
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes are never moved by the GC, so no read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The displacement is a dummy value patched at link time.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // The BSS entry may still be null if the class is unresolved.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5608
// Allocates locations for an HClinitCheck: the class comes in a register and,
// if the check has uses, the output aliases that same input register.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}
5617
// Emits a class-initialization check with a slow path that performs the
// initialization (do_clinit = true) if the class is not yet initialized.
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}
5626
// Returns the HLoadString load kind this code generator will actually use for
// the desired kind. All valid kinds are supported on x86-64; the switch only
// asserts consistency with the compilation mode (JIT vs. AOT).
HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBssEntry:
      // PC-relative kinds are only used when compiling ahead of time.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_string_load_kind;
}
5643
// Allocates register locations for an HLoadString instruction based on its
// load kind.
void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    // The runtime call returns the string in RAX.
    locations->SetOut(Location::RegisterLocation(RAX));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString to save everything.
        // Custom calling convention: RAX serves as both input and output.
        RegisterSet caller_saves = RegisterSet::Empty();
        caller_saves.Add(Location::RegisterLocation(RAX));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
5664
// Records a JIT GC root for the given string handle and creates a patch entry
// for the instruction that will load it; returns the label to be fixed up.
Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
                                                  dex::StringIndex dex_index,
                                                  Handle<mirror::String> handle) {
  jit_string_roots_.Overwrite(
      StringReference(&dex_file, dex_index), reinterpret_cast64<uint64_t>(handle.GetReference()));
  // Add a patch entry and return the label.
  jit_string_patches_.emplace_back(dex_file, dex_index.index_);
  PatchInfo<Label>* info = &jit_string_patches_.back();
  return &info->label;
}
5675
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
//
// Emits code to load a string reference into the output register, dispatching
// on the load kind; kinds not handled by the switch fall through to a
// pResolveString runtime call.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // The displacement is a dummy value patched at link time.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootStringPatch(load);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // The BSS entry may be null if the string is not yet resolved.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
5731
// Returns the thread-local (GS-segment-relative) address of the current
// thread's pending exception field.
static Address GetExceptionTlsAddress() {
  return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
                           /* no_rip */ true);
}
5736
// The loaded exception just needs an output register.
void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
5742
// Loads the pending exception from thread-local storage into the output.
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
5746
// Clearing the exception needs no registers.
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
5750
// Clears the pending exception by writing null to the thread-local field.
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
5754
// Throw is a runtime call taking the exception object as its only argument.
void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
5761
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  // Delegate to the runtime entrypoint that delivers the exception object
  // (already in the first argument register per the locations above).
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5766
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005767static bool CheckCastTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5768 if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005769 // We need a temporary for holding the iftable length.
5770 return true;
5771 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005772 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005773 !kUseBakerReadBarrier &&
5774 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005775 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5776 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5777}
5778
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005779static bool InstanceOfTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5780 return kEmitCompilerReadBarrier &&
5781 !kUseBakerReadBarrier &&
5782 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5783 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5784 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5785}
5786
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005787void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005788 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005789 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01005790 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005791 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005792 case TypeCheckKind::kExactCheck:
5793 case TypeCheckKind::kAbstractClassCheck:
5794 case TypeCheckKind::kClassHierarchyCheck:
5795 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005796 call_kind =
5797 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01005798 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005799 break;
5800 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005801 case TypeCheckKind::kUnresolvedCheck:
5802 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005803 call_kind = LocationSummary::kCallOnSlowPath;
5804 break;
5805 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005806
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005807 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01005808 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005809 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005810 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005811 locations->SetInAt(0, Location::RequiresRegister());
5812 locations->SetInAt(1, Location::Any());
5813 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5814 locations->SetOut(Location::RequiresRegister());
5815 // When read barriers are enabled, we need a temporary register for
5816 // some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005817 if (InstanceOfTypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005818 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005819 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005820}
5821
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005822void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005823 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005824 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005825 Location obj_loc = locations->InAt(0);
5826 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005827 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005828 Location out_loc = locations->Out();
5829 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005830 Location maybe_temp_loc = InstanceOfTypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005831 locations->GetTemp(0) :
5832 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005833 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005834 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5835 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5836 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005837 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005838 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005839
5840 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005841 // Avoid null check if we know obj is not null.
5842 if (instruction->MustDoNullCheck()) {
5843 __ testl(obj, obj);
5844 __ j(kEqual, &zero);
5845 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005846
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005847 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005848 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005849 // /* HeapReference<Class> */ out = obj->klass_
5850 GenerateReferenceLoadTwoRegisters(instruction,
5851 out_loc,
5852 obj_loc,
5853 class_offset,
5854 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005855 if (cls.IsRegister()) {
5856 __ cmpl(out, cls.AsRegister<CpuRegister>());
5857 } else {
5858 DCHECK(cls.IsStackSlot()) << cls;
5859 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5860 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005861 if (zero.IsLinked()) {
5862 // Classes must be equal for the instanceof to succeed.
5863 __ j(kNotEqual, &zero);
5864 __ movl(out, Immediate(1));
5865 __ jmp(&done);
5866 } else {
5867 __ setcc(kEqual, out);
5868 // setcc only sets the low byte.
5869 __ andl(out, Immediate(1));
5870 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005871 break;
5872 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005873
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005874 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005875 // /* HeapReference<Class> */ out = obj->klass_
5876 GenerateReferenceLoadTwoRegisters(instruction,
5877 out_loc,
5878 obj_loc,
5879 class_offset,
5880 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005881 // If the class is abstract, we eagerly fetch the super class of the
5882 // object to avoid doing a comparison we know will fail.
5883 NearLabel loop, success;
5884 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005885 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005886 GenerateReferenceLoadOneRegister(instruction,
5887 out_loc,
5888 super_offset,
5889 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005890 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005891 __ testl(out, out);
5892 // If `out` is null, we use it for the result, and jump to `done`.
5893 __ j(kEqual, &done);
5894 if (cls.IsRegister()) {
5895 __ cmpl(out, cls.AsRegister<CpuRegister>());
5896 } else {
5897 DCHECK(cls.IsStackSlot()) << cls;
5898 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5899 }
5900 __ j(kNotEqual, &loop);
5901 __ movl(out, Immediate(1));
5902 if (zero.IsLinked()) {
5903 __ jmp(&done);
5904 }
5905 break;
5906 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005907
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005908 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005909 // /* HeapReference<Class> */ out = obj->klass_
5910 GenerateReferenceLoadTwoRegisters(instruction,
5911 out_loc,
5912 obj_loc,
5913 class_offset,
5914 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005915 // Walk over the class hierarchy to find a match.
5916 NearLabel loop, success;
5917 __ Bind(&loop);
5918 if (cls.IsRegister()) {
5919 __ cmpl(out, cls.AsRegister<CpuRegister>());
5920 } else {
5921 DCHECK(cls.IsStackSlot()) << cls;
5922 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5923 }
5924 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005925 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005926 GenerateReferenceLoadOneRegister(instruction,
5927 out_loc,
5928 super_offset,
5929 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005930 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005931 __ testl(out, out);
5932 __ j(kNotEqual, &loop);
5933 // If `out` is null, we use it for the result, and jump to `done`.
5934 __ jmp(&done);
5935 __ Bind(&success);
5936 __ movl(out, Immediate(1));
5937 if (zero.IsLinked()) {
5938 __ jmp(&done);
5939 }
5940 break;
5941 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005942
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005943 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005944 // /* HeapReference<Class> */ out = obj->klass_
5945 GenerateReferenceLoadTwoRegisters(instruction,
5946 out_loc,
5947 obj_loc,
5948 class_offset,
5949 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005950 // Do an exact check.
5951 NearLabel exact_check;
5952 if (cls.IsRegister()) {
5953 __ cmpl(out, cls.AsRegister<CpuRegister>());
5954 } else {
5955 DCHECK(cls.IsStackSlot()) << cls;
5956 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5957 }
5958 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005959 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005960 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005961 GenerateReferenceLoadOneRegister(instruction,
5962 out_loc,
5963 component_offset,
5964 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005965 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005966 __ testl(out, out);
5967 // If `out` is null, we use it for the result, and jump to `done`.
5968 __ j(kEqual, &done);
5969 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5970 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005971 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005972 __ movl(out, Immediate(1));
5973 __ jmp(&done);
5974 break;
5975 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005976
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005977 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005978 // No read barrier since the slow path will retry upon failure.
5979 // /* HeapReference<Class> */ out = obj->klass_
5980 GenerateReferenceLoadTwoRegisters(instruction,
5981 out_loc,
5982 obj_loc,
5983 class_offset,
5984 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005985 if (cls.IsRegister()) {
5986 __ cmpl(out, cls.AsRegister<CpuRegister>());
5987 } else {
5988 DCHECK(cls.IsStackSlot()) << cls;
5989 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5990 }
5991 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005992 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5993 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005994 codegen_->AddSlowPath(slow_path);
5995 __ j(kNotEqual, slow_path->GetEntryLabel());
5996 __ movl(out, Immediate(1));
5997 if (zero.IsLinked()) {
5998 __ jmp(&done);
5999 }
6000 break;
6001 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006002
Calin Juravle98893e12015-10-02 21:05:03 +01006003 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006004 case TypeCheckKind::kInterfaceCheck: {
6005 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006006 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006007 // cases.
6008 //
6009 // We cannot directly call the InstanceofNonTrivial runtime
6010 // entry point without resorting to a type checking slow path
6011 // here (i.e. by calling InvokeRuntime directly), as it would
6012 // require to assign fixed registers for the inputs of this
6013 // HInstanceOf instruction (following the runtime calling
6014 // convention), which might be cluttered by the potential first
6015 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006016 //
6017 // TODO: Introduce a new runtime entry point taking the object
6018 // to test (instead of its class) as argument, and let it deal
6019 // with the read barrier issues. This will let us refactor this
6020 // case of the `switch` code as it was previously (with a direct
6021 // call to the runtime not using a type checking slow path).
6022 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006023 DCHECK(locations->OnlyCallsOnSlowPath());
6024 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
6025 /* is_fatal */ false);
6026 codegen_->AddSlowPath(slow_path);
6027 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006028 if (zero.IsLinked()) {
6029 __ jmp(&done);
6030 }
6031 break;
6032 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006033 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006034
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006035 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006036 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006037 __ xorl(out, out);
6038 }
6039
6040 if (done.IsLinked()) {
6041 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006042 }
6043
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006044 if (slow_path != nullptr) {
6045 __ Bind(slow_path->GetExitLabel());
6046 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006047}
6048
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006049static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006050 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006051 case TypeCheckKind::kExactCheck:
6052 case TypeCheckKind::kAbstractClassCheck:
6053 case TypeCheckKind::kClassHierarchyCheck:
6054 case TypeCheckKind::kArrayObjectCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006055 return !throws_into_catch && !kEmitCompilerReadBarrier;
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006056 case TypeCheckKind::kInterfaceCheck:
6057 return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006058 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006059 case TypeCheckKind::kUnresolvedCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006060 return false;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006061 }
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006062 LOG(FATAL) << "Unreachable";
6063 UNREACHABLE();
6064}
6065
6066void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
6067 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
6068 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
6069 bool is_fatal_slow_path = IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch);
6070 LocationSummary::CallKind call_kind = is_fatal_slow_path
6071 ? LocationSummary::kNoCall
6072 : LocationSummary::kCallOnSlowPath;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006073 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6074 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006075 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6076 // Require a register for the interface check since there is a loop that compares the class to
6077 // a memory address.
6078 locations->SetInAt(1, Location::RequiresRegister());
6079 } else {
6080 locations->SetInAt(1, Location::Any());
6081 }
6082
Roland Levillain0d5a2812015-11-13 10:07:31 +00006083 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
6084 locations->AddTemp(Location::RequiresRegister());
6085 // When read barriers are enabled, we need an additional temporary
6086 // register for some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006087 if (CheckCastTypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006088 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006089 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006090}
6091
// Generates code for a checkcast: execution falls through when `obj` is null
// or passes the check for the class held in `cls`; otherwise it branches to
// a type check slow path (which performs a more involved check and/or throws).
void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
  // Second temp, only allocated for some configurations (see the locations builder).
  Location maybe_temp2_loc = CheckCastTypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  // Field offsets used by the inline checks below.
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal =
      IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
  SlowPathCode* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  NearLabel done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ testl(temp, temp);
      // Otherwise, compare the classes.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Keep climbing the hierarchy until a match (fall through) or null (slow path).
      __ j(kNotEqual, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      NearLabel loop;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ testl(temp, temp);
      __ j(kNotZero, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Do an exact check.
      NearLabel check_non_primitive_component_type;
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // A null component type means the object is not an array: throw via
      // the slow path. A non-null component type must additionally be
      // checked against the primitive types.
      __ testl(temp, temp);
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck: {
      // We always go into the type check slow path for the unresolved case.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kInterfaceCheck:
      // Fast path for the interface check. We always go slow path for heap poisoning since
      // unpoisoning cls would require an extra temp.
      if (!kPoisonHeapReferences) {
        // Try to avoid read barriers to improve the fast path. We can not get false positives by
        // doing this.
        // /* HeapReference<Class> */ temp = obj->klass_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          obj_loc,
                                          class_offset,
                                          kWithoutReadBarrier);

        // /* HeapReference<Class> */ temp = temp->iftable_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          temp_loc,
                                          iftable_offset,
                                          kWithoutReadBarrier);
        // Iftable is never null.
        __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
        // Loop through the iftable and check if any class matches.
        NearLabel start_loop;
        __ Bind(&start_loop);
        // Need to subtract first to handle the empty array case.
        __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
        __ j(kNegative, type_check_slow_path->GetEntryLabel());
        // Go to next interface if the classes do not match.
        __ cmpl(cls.AsRegister<CpuRegister>(),
                CodeGeneratorX86_64::ArrayAddress(temp,
                                                  maybe_temp2_loc,
                                                  TIMES_4,
                                                  object_array_data_offset));
        __ j(kNotEqual, &start_loop);  // Fall through when a matching interface is found.
      } else {
        __ jmp(type_check_slow_path->GetEntryLabel());
      }
      break;
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  // The slow path (if taken and non-fatal) resumes here.
  __ Bind(type_check_slow_path->GetExitLabel());
}
6315
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006316void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6317 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006318 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006319 InvokeRuntimeCallingConvention calling_convention;
6320 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6321}
6322
6323void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006324 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006325 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006326 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006327 if (instruction->IsEnter()) {
6328 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6329 } else {
6330 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6331 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006332}
6333
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006334void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6335void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6336void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6337
6338void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6339 LocationSummary* locations =
6340 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6341 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6342 || instruction->GetResultType() == Primitive::kPrimLong);
6343 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006344 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006345 locations->SetOut(Location::SameAsFirstInput());
6346}
6347
// Code generation for And/Or/Xor is shared in HandleBitwiseOperation below.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6359
6360void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6361 LocationSummary* locations = instruction->GetLocations();
6362 Location first = locations->InAt(0);
6363 Location second = locations->InAt(1);
6364 DCHECK(first.Equals(locations->Out()));
6365
6366 if (instruction->GetResultType() == Primitive::kPrimInt) {
6367 if (second.IsRegister()) {
6368 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006369 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006370 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006371 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006372 } else {
6373 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006374 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006375 }
6376 } else if (second.IsConstant()) {
6377 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6378 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006379 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006380 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006381 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006382 } else {
6383 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006384 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006385 }
6386 } else {
6387 Address address(CpuRegister(RSP), second.GetStackIndex());
6388 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006389 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006390 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006391 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006392 } else {
6393 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006394 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006395 }
6396 }
6397 } else {
6398 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006399 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6400 bool second_is_constant = false;
6401 int64_t value = 0;
6402 if (second.IsConstant()) {
6403 second_is_constant = true;
6404 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006405 }
Mark Mendell40741f32015-04-20 22:10:34 -04006406 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006407
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006408 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006409 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006410 if (is_int32_value) {
6411 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6412 } else {
6413 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6414 }
6415 } else if (second.IsDoubleStackSlot()) {
6416 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006417 } else {
6418 __ andq(first_reg, second.AsRegister<CpuRegister>());
6419 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006420 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006421 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006422 if (is_int32_value) {
6423 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6424 } else {
6425 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6426 }
6427 } else if (second.IsDoubleStackSlot()) {
6428 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006429 } else {
6430 __ orq(first_reg, second.AsRegister<CpuRegister>());
6431 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006432 } else {
6433 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006434 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006435 if (is_int32_value) {
6436 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6437 } else {
6438 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6439 }
6440 } else if (second.IsDoubleStackSlot()) {
6441 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006442 } else {
6443 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6444 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006445 }
6446 }
6447}
6448
// Loads a heap reference from `*(out + offset)` into `out`, i.e. the base
// register is also the destination. Emits a read barrier when
// `read_barrier_option` is kWithReadBarrier. In the non-Baker (slow path)
// read barrier case, `maybe_temp` must be a register: it preserves the
// original base value, which the barrier needs after `out` is clobbered.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6481
// Loads a heap reference from `*(obj + offset)` into a distinct register
// `out`. Because base and destination differ, no temporary is needed even in
// the slow-path read barrier case (contrast with
// GenerateReferenceLoadOneRegister above).
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6510
// Loads a GC root from `address` into `root`. With Baker read barriers the
// root is loaded directly and a slow path invokes the per-register mark
// entrypoint only when that entrypoint is non-null (i.e. when the GC is
// marking); with other read barriers the root's address is materialized and
// resolved through the generic root slow path. When non-null, `fixup_label`
// is bound immediately after the load/lea — presumably so callers can patch
// that instruction later; confirm at call sites.
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking */ false);
      codegen_->AddSlowPath(slow_path);

      // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
      const int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
      __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip */ true), Immediate(0));
      // The entrypoint is null when the GC is not marking.
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6577
6578void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6579 Location ref,
6580 CpuRegister obj,
6581 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006582 bool needs_null_check) {
6583 DCHECK(kEmitCompilerReadBarrier);
6584 DCHECK(kUseBakerReadBarrier);
6585
6586 // /* HeapReference<Object> */ ref = *(obj + offset)
6587 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006588 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006589}
6590
6591void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6592 Location ref,
6593 CpuRegister obj,
6594 uint32_t data_offset,
6595 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006596 bool needs_null_check) {
6597 DCHECK(kEmitCompilerReadBarrier);
6598 DCHECK(kUseBakerReadBarrier);
6599
Roland Levillain3d312422016-06-23 13:53:42 +01006600 static_assert(
6601 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6602 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006603 // /* HeapReference<Object> */ ref =
6604 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006605 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006606 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006607}
6608
// Common implementation of the fast-path (Baker) read barrier reference
// load: loads `*src` into `ref`, and branches to a mark slow path when the
// holder object `obj` is gray. When `always_update_field` is true the slow
// path used is ReadBarrierMarkAndUpdateFieldSlowPathX86_64 and `temp1`/
// `temp2` must be provided. NOTE: the gray-bit test sets the CPU flags well
// before the conditional branch; every instruction in between must leave
// the flags untouched.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6692
6693void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6694 Location out,
6695 Location ref,
6696 Location obj,
6697 uint32_t offset,
6698 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006699 DCHECK(kEmitCompilerReadBarrier);
6700
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006701 // Insert a slow path based read barrier *after* the reference load.
6702 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006703 // If heap poisoning is enabled, the unpoisoning of the loaded
6704 // reference will be carried out by the runtime within the slow
6705 // path.
6706 //
6707 // Note that `ref` currently does not get unpoisoned (when heap
6708 // poisoning is enabled), which is alright as the `ref` argument is
6709 // not used by the artReadBarrierSlow entry point.
6710 //
6711 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6712 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6713 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6714 AddSlowPath(slow_path);
6715
Roland Levillain0d5a2812015-11-13 10:07:31 +00006716 __ jmp(slow_path->GetEntryLabel());
6717 __ Bind(slow_path->GetExitLabel());
6718}
6719
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006720void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6721 Location out,
6722 Location ref,
6723 Location obj,
6724 uint32_t offset,
6725 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006726 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006727 // Baker's read barriers shall be handled by the fast path
6728 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6729 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006730 // If heap poisoning is enabled, unpoisoning will be taken care of
6731 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006732 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006733 } else if (kPoisonHeapReferences) {
6734 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6735 }
6736}
6737
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006738void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6739 Location out,
6740 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006741 DCHECK(kEmitCompilerReadBarrier);
6742
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006743 // Insert a slow path based read barrier *after* the GC root load.
6744 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006745 // Note that GC roots are not affected by heap poisoning, so we do
6746 // not need to do anything special for this here.
6747 SlowPathCode* slow_path =
6748 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6749 AddSlowPath(slow_path);
6750
Roland Levillain0d5a2812015-11-13 10:07:31 +00006751 __ jmp(slow_path->GetEntryLabel());
6752 __ Bind(slow_path->GetExitLabel());
6753}
6754
// HBoundType must not survive to this phase; hitting this visitor indicates
// a compiler pipeline bug.
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
6759
// HBoundType must not survive to this phase; hitting this visitor indicates
// a compiler pipeline bug.
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
6764
Mark Mendellfe57faa2015-09-18 09:26:15 -04006765// Simple implementation of packed switch - generate cascaded compare/jumps.
6766void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6767 LocationSummary* locations =
6768 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6769 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006770 locations->AddTemp(Location::RequiresRegister());
6771 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006772}
6773
// Generates code for a packed switch: a cascade of compare/jump pairs for
// small switches (at most kPackedSwitchJumpTableThreshold entries),
// otherwise a jump table of signed offsets stored in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Non-zero bias: one compare rules out values below the range (signed
      // kLess -> default) and handles the first case (kEqual) at once.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      // Zero bias: an unsigned kBelow comparison covers both "less than the
      // current case" and "negative input" simultaneously.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each compare below dispatches two consecutive cases: a `first_condition`
    // hit means case `index`, an exact match means case `index + 1`.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table implementation below.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? (Unsigned kAbove also catches negative inputs.)
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6854
Aart Bikc5d47542016-01-27 17:00:35 -08006855void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6856 if (value == 0) {
6857 __ xorl(dest, dest);
6858 } else {
6859 __ movl(dest, Immediate(value));
6860 }
6861}
6862
Mark Mendell92e83bf2015-05-07 11:25:03 -04006863void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6864 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006865 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006866 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006867 } else if (IsUint<32>(value)) {
6868 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006869 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6870 } else {
6871 __ movq(dest, Immediate(value));
6872 }
6873}
6874
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006875void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6876 if (value == 0) {
6877 __ xorps(dest, dest);
6878 } else {
6879 __ movss(dest, LiteralInt32Address(value));
6880 }
6881}
6882
6883void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6884 if (value == 0) {
6885 __ xorpd(dest, dest);
6886 } else {
6887 __ movsd(dest, LiteralInt64Address(value));
6888 }
6889}
6890
6891void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6892 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6893}
6894
6895void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6896 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6897}
6898
Aart Bika19616e2016-02-01 18:57:58 -08006899void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6900 if (value == 0) {
6901 __ testl(dest, dest);
6902 } else {
6903 __ cmpl(dest, Immediate(value));
6904 }
6905}
6906
6907void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6908 if (IsInt<32>(value)) {
6909 if (value == 0) {
6910 __ testq(dest, dest);
6911 } else {
6912 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6913 }
6914 } else {
6915 // Value won't fit in an int.
6916 __ cmpq(dest, LiteralInt64Address(value));
6917 }
6918}
6919
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006920void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6921 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07006922 GenerateIntCompare(lhs_reg, rhs);
6923}
6924
6925void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006926 if (rhs.IsConstant()) {
6927 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07006928 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006929 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07006930 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006931 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006932 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006933 }
6934}
6935
6936void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6937 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6938 if (rhs.IsConstant()) {
6939 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6940 Compare64BitValue(lhs_reg, value);
6941 } else if (rhs.IsDoubleStackSlot()) {
6942 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6943 } else {
6944 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6945 }
6946}
6947
6948Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6949 Location index,
6950 ScaleFactor scale,
6951 uint32_t data_offset) {
6952 return index.IsConstant() ?
6953 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6954 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6955}
6956
Mark Mendellcfa410b2015-05-25 16:02:44 -04006957void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6958 DCHECK(dest.IsDoubleStackSlot());
6959 if (IsInt<32>(value)) {
6960 // Can move directly as an int32 constant.
6961 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6962 Immediate(static_cast<int32_t>(value)));
6963 } else {
6964 Load64BitValue(CpuRegister(TMP), value);
6965 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6966 }
6967}
6968
/**
 * Class to handle late fixup of offsets into constant area.
 *
 * A RIP-relative displacement cannot be computed until the constant area's
 * final position is known, so instructions referencing it record one of these
 * fixups and the 4-byte displacement is patched in during finalization.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  // Lets subclasses set the offset after construction, once it is known
  // (e.g. jump tables appended to the constant area late).
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  // The code generator owning the constant area; used to query its start.
  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
6996
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  // The table's offset in the constant area is not known at construction
  // time; -1 is a placeholder replaced by CreateJumpTable().
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Appends one 32-bit offset per switch successor to the constant area and
  // records the table's position for the RIP-relative reference.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      // All successor labels must already be bound when the table is emitted.
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};
7033
// Emits the constant area (literal constants and jump tables) after the
// generated code, then delegates to the base class to finish code generation.
void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    // Record where the constant area starts so RIPFixup::Process can compute
    // displacements relative to it.
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables. This must happen after all code is emitted so
    // that every target label is bound.
    for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}
7054
Mark Mendellf55c3e02015-03-26 21:07:46 -04007055Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
7056 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
7057 return Address::RIP(fixup);
7058}
7059
7060Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
7061 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
7062 return Address::RIP(fixup);
7063}
7064
7065Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
7066 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
7067 return Address::RIP(fixup);
7068}
7069
7070Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
7071 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
7072 return Address::RIP(fixup);
7073}
7074
Andreas Gampe85b62f22015-09-09 13:15:38 -07007075// TODO: trg as memory.
7076void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
7077 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007078 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007079 return;
7080 }
7081
7082 DCHECK_NE(type, Primitive::kPrimVoid);
7083
7084 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7085 if (trg.Equals(return_loc)) {
7086 return;
7087 }
7088
7089 // Let the parallel move resolver take care of all of this.
7090 HParallelMove parallel_move(GetGraph()->GetArena());
7091 parallel_move.AddMove(return_loc, trg, type, nullptr);
7092 GetMoveResolver()->EmitNativeCode(&parallel_move);
7093}
7094
Mark Mendell9c86b482015-09-18 13:36:07 -04007095Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7096 // Create a fixup to be used to create and address the jump table.
7097 JumpTableRIPFixup* table_fixup =
7098 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7099
7100 // We have to populate the jump tables.
7101 fixups_to_jump_tables_.push_back(table_fixup);
7102 return Address::RIP(table_fixup);
7103}
7104
// Stores the 64-bit constant `v` to memory: a single movq when the value fits
// in a sign-extended 32-bit immediate, otherwise two 32-bit movl stores
// (x86-64 has no 64-bit immediate-to-memory move).
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Didn't fit in a 32-bit immediate. Do it in two 32-bit pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    // Record the implicit null check on the first store only.
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
7122
// Patches one JIT root literal in the generated code: writes the address of
// the root's entry in the JIT roots table over the 32-bit literal recorded by
// `info`.
void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const PatchInfo<Label>& info,
                                          uint64_t index_in_table) const {
  // Adjust the bound label position back to the start of the 32-bit literal.
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  // Address of the table entry: base of the roots data plus the slot index
  // scaled by the GcRoot slot size.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
  // The literal inside the code buffer has no alignment guarantee; use an
  // alignment-1 type so the 32-bit store is well-defined.
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}
7134
// Resolves all recorded JIT root literals (strings and classes) by looking up
// each root's slot in the corresponding roots map and patching the code.
void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  // String roots: keyed by (dex file, string index).
  for (const PatchInfo<Label>& info : jit_string_patches_) {
    const auto it = jit_string_roots_.find(
        StringReference(&info.dex_file, dex::StringIndex(info.index)));
    // Every recorded patch must have been registered in the roots map.
    DCHECK(it != jit_string_roots_.end());
    uint64_t index_in_table = it->second;
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }

  // Class roots: keyed by (dex file, type index).
  for (const PatchInfo<Label>& info : jit_class_patches_) {
    const auto it = jit_class_roots_.find(
        TypeReference(&info.dex_file, dex::TypeIndex(info.index)));
    DCHECK(it != jit_class_roots_.end());
    uint64_t index_in_table = it->second;
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }
}
7152
Roland Levillain4d027112015-07-01 15:41:14 +01007153#undef __
7154
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007155} // namespace x86_64
7156} // namespace art